0f4616ac9a4f4db956a4e73c70218ad113af5e68,tensorflow_datasets/scripts/cleanup/url_filename_recorder.py,,main,#Any#,61

Before Change


    for url_path in _collect_path_to_update():
      url_infos = checksums.load_url_infos(url_path)
      cached_url_infos[url_path] = url_infos
      future_filenames[url_path] = executor.map(_request_filename, url_infos)
    for path, future_filename in future_filenames.items():
      old_url_infos = cached_url_infos[path]
      updated_url_infos = _update_url_infos(old_url_infos, future_filename)
      checksums.save_url_infos(path, updated_url_infos)
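
For context, the sketch below reproduces this per-path flow as a self-contained, runnable approximation; _request_filename and _update_url_infos are hypothetical stubs rather than the module's real helpers, and no HTTP requests are made. Because each path's URL dict is mapped independently, a URL that appears under several checksum paths is requested once per path, which is the duplication the change below removes.

from concurrent import futures

# Hypothetical stand-ins for the module's helpers.
def _request_filename(url):
  return url.rsplit("/", 1)[-1]

def _update_url_infos(url_infos, filenames):
  return dict(zip(url_infos, filenames))

def record_per_path(path_to_url_infos, executor):
  # One executor.map per checksum path; iterating a dict yields its URLs.
  future_filenames = {
      path: executor.map(_request_filename, url_infos)
      for path, url_infos in path_to_url_infos.items()
  }
  return {
      path: _update_url_infos(path_to_url_infos[path], filenames)
      for path, filenames in future_filenames.items()
  }

with futures.ThreadPoolExecutor(max_workers=100) as executor:
  print(record_per_path(
      {"a.txt": {"http://x/f1.zip": {}, "http://x/f2.zip": {}}}, executor))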

After Change



  # Remove duplicate urls
  all_url_infos = {}
  for url_infos in path_to_url_infos.values():
    all_url_infos.update(url_infos)

  with futures.ThreadPoolExecutor(max_workers=100) as executor:
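
The snippet is truncated where the thread pool is opened. As a hedged completion, the sketch below shows one way the deduplicated URL set could be resolved once and the results fanned back out per checksum path; the _request_filename stub and the write-back step are assumptions, not the commit's actual code.

from concurrent import futures
from urllib.parse import urlparse

def _request_filename(url):
  # Hypothetical stub: derive a filename from the URL path instead of
  # issuing a real HTTP request.
  return urlparse(url).path.rsplit("/", 1)[-1]

def record_filenames(path_to_url_infos):
  # Remove duplicate urls: collapse all per-path dicts into one URL set.
  all_url_infos = {}
  for url_infos in path_to_url_infos.values():
    all_url_infos.update(url_infos)

  # Resolve every distinct URL exactly once, up to 100 requests in flight.
  with futures.ThreadPoolExecutor(max_workers=100) as executor:
    filenames = dict(
        zip(all_url_infos, executor.map(_request_filename, all_url_infos)))

  # Fan the resolved filenames back out to each path (assumed write-back step).
  return {
      path: {url: filenames[url] for url in url_infos}
      for path, url_infos in path_to_url_infos.items()
  }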
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 3

Instances


Project Name: tensorflow/datasets
Commit Name: 0f4616ac9a4f4db956a4e73c70218ad113af5e68
Time: 2020-10-26
Author: devilincarcerated020@yahoo.com
File Name: tensorflow_datasets/scripts/cleanup/url_filename_recorder.py
Class Name:
Method Name: main


Project Name: SPFlow/SPFlow
Commit Name: ddf383126170561049c33b95821b5bf35971147d
Time: 2018-06-08
Author: molina@cs.tu-darmstadt.de
File Name: src/spn/structure/Base.py
Class Name:
Method Name: get_number_of_layers


Project Name: WZBSocialScienceCenter/tmtoolkit
Commit Name: 035fcc8ed9014d14b73f1ba5c7407457845d09d7
Time: 2019-06-26
Author: markus.konrad@wzb.eu
File Name: tmtoolkit/preprocess/_tmpreproc.py
Class Name: TMPreproc
Method Name: doc_lengths