Skip to content
This repository was archived by the owner on Apr 28, 2023. It is now read-only.

Commit 12fe566

Browse files
committed
Cleanup cache .cuda artifacts and more fixes to python test
cleaning up more cache .cuda artifacts left in codebase and docs. Also fixing python test more.
1 parent f776141 commit 12fe566

File tree

6 files changed

+8
-13
lines changed

6 files changed

+8
-13
lines changed

docs/source/framework/pytorch_integration/autograd_with_tc.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -114,8 +114,8 @@ Let's see how to cache options to file when we tune a training layer.
114114
out = convolution(I, W)
115115
out[0].sum().backward()
116116
117-
You will find two cache files created: :code:`convolution_train.cuda/options` has
118-
options for the forward layer and :code:`convolution_train_backward.cuda/options` file
117+
You will find two cache files created: :code:`convolution_train.options` has
118+
options for the forward layer and :code:`convolution_train_backward.options` file
119119
has options for the grad layer.
120120

121121
Reordering grad outputs

docs/source/framework/pytorch_integration/autotuning_layers.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@ For example:
219219
tc.decode
220220
---------
221221

222-
When you save the autotuner cache, two files are created ending in :code:`.cuda/.options`.
222+
When you save the autotuner cache, one file is created ending in :code:`.options`.
223223
The :code:`.options` file contains the encoded kernel options. If you are curious
224224
about what those options look like, you can decode the options by calling :code:`tc.decode`
225225

tc/autotuner/genetic_autotuner.cc

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,7 @@ void GeneticAutotuner::storeCaches(const std::string& filename) {
5353
if (filename.empty()) {
5454
std::cout << "No filepath provided, not saving cache" << std::endl;
5555
} else {
56-
std::cout << "Dumping cache to " << filename << ".cuda/options"
57-
<< std::endl;
56+
std::cout << "Dumping cache to " << filename << ".options" << std::endl;
5857
tc::OptionsCache::getCache()->keepOnlyBestCandidates(
5958
tc::FLAGS_tuner_save_best_candidates_count);
6059
tc::OptionsCache::dumpCacheToProtobuf(tc::makeOptionsFilename(filename));
@@ -69,7 +68,7 @@ std::vector<CudaMappingOptions> GeneticAutotuner::load(
6968
const std::vector<const DLTensor*>& inputs,
7069
const size_t numCandidates) {
7170
std::cout << "Loading proto from: " << tc::makeOptionsFilename(cacheFileName)
72-
<< " and " << tc::makeCudaFilename(cacheFileName) << std::endl;
71+
<< std::endl;
7372
enableOrLoadCache(cacheFileName);
7473
tc::FLAGS_tuner_gen_restore_number =
7574
std::min(numCandidates, size_t(FLAGS_tuner_gen_pop_size) - 1);
@@ -141,7 +140,7 @@ llvm::Optional<CudaMappingOptions> GeneticAutotuner::tune(
141140
tuner.run(FLAGS_tuner_gen_generations);
142141
} catch (const std::exception& e) {
143142
std::cerr << "Exception during autotuning: " << e.what()
144-
<< "\n dumping cache to " << cacheFileName << ".cuda/options"
143+
<< "\n dumping cache to " << cacheFileName << ".options"
145144
<< std::endl;
146145
storeCaches(cacheFileName);
147146
tunerThreadEx = std::current_exception();

tc/benchmarks/benchmark_fixture.h

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -255,8 +255,7 @@ struct Benchmark : public ::testing::Test {
255255
return true;
256256
}) {
257257
std::cout << "Validating proto from: "
258-
<< tc::makeOptionsFilename(cacheFilename) << "and "
259-
<< tc::makeCudaFilename(cacheFilename) << std::endl;
258+
<< tc::makeOptionsFilename(cacheFilename) << std::endl;
260259

261260
tc::OptionsCache::enableCache();
262261
tc::OptionsCache::loadCacheFromProtobuf(cacheFilename + ".options");

tc/core/compilation_cache.h

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,4 @@ inline std::string makeOptionsFilename(const std::string& filename) {
101101
return filename + ".options";
102102
}
103103

104-
inline std::string makeCudaFilename(const std::string& filename) {
105-
return filename + ".cuda";
106-
}
107104
} // namespace tc

test_python/layers/test_autotuner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def test_autotuner_cachefile_first(self):
105105
def test_autotuner_cachefile_load(self):
106106
lang = MATMUL_LANG
107107
cache_file = "{}/matmul_100_400_500".format(PATH_PREFIX) # use argparse if input from command line
108-
assert os.path.isfile("{}.cuda".format(cache_file)), "looks like the cache_file doesn't exist"
108+
assert os.path.isfile("{}.options".format(cache_file)), "looks like the cache_file doesn't exist"
109109

110110
matmul = tc.define(lang, name="matmul")
111111
mat1, mat2 = torch.randn(100, 400).cuda(), torch.randn(400, 500).cuda()

0 commit comments

Comments
 (0)