c77f0a5bcc77286af395b8ed328b9bb5d98e8490,opennmt/runner.py,Runner,score,#Runner#Any#Any#Any#Any#,451

Before Change



    with tf.Graph().as_default() as g:
      tf.train.create_global_step(g)
      features, labels = input_fn()
      labels["alignment"] = None  # Add alignment key to force the model to return attention.
      outputs, _ = self._model(
          features,
          labels,

After Change


    if checkpoint_path is None:
      raise ValueError("could not find a trained model in %s" % self._config["model_dir"])

    model = copy.deepcopy(self._model)
    with tf.Graph().as_default():
      dataset = model.examples_inputter.make_evaluation_dataset(
          features_file,
          predictions_file,
          self._config["score"]["batch_size"],
          num_threads=self._config["score"].get("num_threads"),
          prefetch_buffer_size=self._config["score"].get("prefetch_buffer_size"))
      iterator = dataset.make_initializable_iterator()
      features, labels = iterator.get_next()
      labels["alignment"] = None  # Add alignment key to force the model to return attention.
      outputs, _ = model(
          features,
          labels,
          self._config["params"],
          tf.estimator.ModeKeys.EVAL)

      cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
          logits=outputs["logits"], labels=labels["ids_out"])
      weights = tf.sequence_mask(labels["length"], dtype=cross_entropy.dtype)
      masked_cross_entropy = cross_entropy * weights
      scores = tf.reduce_sum(masked_cross_entropy, axis=1)
      results = {
          "attention": outputs["attention"],
          "cross_entropy": cross_entropy,
          "score": scores,
          "tokens": labels["tokens"],
          "length": labels["length"] - 1  # -1 for the special token.
      }

      if output_file:
        stream = io.open(output_file, encoding="utf-8", mode="w")
      else:
        stream = sys.stdout

      with tf.train.MonitoredSession(
          session_creator=tf.train.ChiefSessionCreator(
              checkpoint_filename_with_path=checkpoint_path,
              config=self._session_config)) as sess:
        sess.run(iterator.initializer)
        while not sess.should_stop():
          for batch in misc.extract_batches(sess.run(results)):
            tokens = batch["tokens"][:batch["length"]]
            sentence = self._model.labels_inputter.tokenizer.detokenize(tokens)
Italian Trulli
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 6

Instances


Project Name: OpenNMT/OpenNMT-tf
Commit Name: c77f0a5bcc77286af395b8ed328b9bb5d98e8490
Time: 2019-02-26
Author: guillaume.klein@systrangroup.com
File Name: opennmt/runner.py
Class Name: Runner
Method Name: score


Project Name: OpenNMT/OpenNMT-tf
Commit Name: b4984bf716560a3f5e71c0d5e35cf6b9bd99c211
Time: 2019-04-01
Author: guillaume.klein@systrangroup.com
File Name: opennmt/tests/model_test.py
Class Name: ModelTest
Method Name: testSequenceToSequenceWithReplaceUnknownTarget


Project Name: OpenNMT/OpenNMT-tf
Commit Name: 8566b142ddf39eb999e6765a216d54c957f526a3
Time: 2019-04-01
Author: guillaume.klein@systrangroup.com
File Name: opennmt/tests/model_test.py
Class Name: ModelTest
Method Name: _testGenericModel


Project Name: OpenNMT/OpenNMT-tf
Commit Name: c77f0a5bcc77286af395b8ed328b9bb5d98e8490
Time: 2019-02-26
Author: guillaume.klein@systrangroup.com
File Name: opennmt/runner.py
Class Name: Runner
Method Name: score