From 5b634853062544399891fa6aecf600d1318cdb66 Mon Sep 17 00:00:00 2001 From: tangzhankun Date: Tue, 13 Dec 2016 16:31:25 +0800 Subject: [PATCH 01/32] Update README.txt --- README.txt | 34 +++------------------------------- 1 file changed, 3 insertions(+), 31 deletions(-) diff --git a/README.txt b/README.txt index 148cd31c86b72..9162dcc7606a9 100644 --- a/README.txt +++ b/README.txt @@ -1,31 +1,3 @@ -For the latest information about Hadoop, please visit our website at: - - http://hadoop.apache.org/core/ - -and our wiki, at: - - http://wiki.apache.org/hadoop/ - -This distribution includes cryptographic software. The country in -which you currently reside may have restrictions on the import, -possession, use, and/or re-export to another country, of -encryption software. BEFORE using any encryption software, please -check your country's laws, regulations and policies concerning the -import, possession, or use, and re-export of encryption software, to -see if this is permitted. See for more -information. - -The U.S. Government Department of Commerce, Bureau of Industry and -Security (BIS), has classified this software as Export Commodity -Control Number (ECCN) 5D002.C.1, which includes information security -software using or performing cryptographic functions with asymmetric -algorithms. The form and manner of this Apache Software Foundation -distribution makes it eligible for export under the License Exception -ENC Technology Software Unrestricted (TSU) exception (see the BIS -Export Administration Regulations, Section 740.13) for both object -code and source code. - -The following provides more details on the included cryptographic -software: - Hadoop Core uses the SSL libraries from the Jetty project written -by mortbay.org. +Tensorflow on YARN +====================== +A YARN native application for deep learning. 
From 39bd5a3bd5b317286ad7ed549cfc8757abc32b75 Mon Sep 17 00:00:00 2001 From: Zhong Muzhong Date: Fri, 16 Dec 2016 11:29:48 +0800 Subject: [PATCH 02/32] Tensorflow On Yarn: 1. AppMaster 2. Yarn Client 3. Tensorflow Container --- .../pom.xml | 198 +++ .../pom2.xml | 164 +++ .../tensorflow/ApplicationMaster.java | 1283 +++++++++++++++++ .../yarn/applications/tensorflow/Client.java | 758 ++++++++++ .../applications/tensorflow/ClusterSpec.java | 5 + .../applications/tensorflow/DSConstants.java | 54 + .../DistributedShellTimelinePlugin.java | 79 + .../tensorflow/Log4jPropertyHelper.java | 55 + .../tensorflow/TFAmContainer.java | 129 ++ .../applications/tensorflow/TFClient.java | 29 + .../applications/tensorflow/TFContainer.java | 112 ++ .../applications/tensorflow/TFServer.java | 90 ++ .../applications/tensorflow/package-info.java | 19 + 13 files changed, 2975 insertions(+) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java create mode 100644 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Log4jPropertyHelper.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/package-info.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml new file mode 100644 index 0000000000000..af94871ee16a7 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml @@ -0,0 +1,198 @@ + + + + + hadoop-yarn-applications + org.apache.hadoop + 3.0.0-alpha2-SNAPSHOT + + 4.0.0 + org.apache.hadoop + hadoop-yarn-applications-tensorflow + 3.0.0-alpha2-SNAPSHOT + Apache Hadoop YARN tensorflow + + + + ${project.parent.parent.basedir} + + + + 
+ + org.apache.hadoop + hadoop-common + provided + + + + junit + junit + test + + + + log4j + log4j + + + commons-lang + commons-lang + + + com.google.guava + guava + + + commons-logging + commons-logging + + + commons-cli + commons-cli + + + commons-io + commons-io + + + org.apache.hadoop + hadoop-yarn-server-applicationhistoryservice + + + org.apache.hadoop + hadoop-yarn-server-timelineservice + test-jar + test + + + + org.apache.hadoop + hadoop-annotations + + + + org.apache.hadoop + hadoop-common + test-jar + test + + + + org.apache.hadoop + hadoop-yarn-api + + + + org.apache.hadoop + hadoop-yarn-common + + + + org.apache.hadoop + hadoop-yarn-client + + + + org.apache.hadoop + hadoop-yarn-server-nodemanager + test + + + + org.apache.hadoop + hadoop-yarn-server-resourcemanager + test + + + + org.apache.hadoop + hadoop-yarn-server-tests + test-jar + test + + + org.mockito + mockito-all + test + + + org.apache.hadoop + hadoop-yarn-server-timeline-pluginstorage + + + org.apache.hadoop + hadoop-yarn-server-timeline-pluginstorage + test-jar + test + + + org.apache.hadoop + hadoop-yarn-common + test-jar + test + + + org.apache.hadoop + hadoop-hdfs + test + + + org.apache.hadoop + hadoop-hdfs + test + test-jar + + + + + + + maven-jar-plugin + + + + jar + + + test-compile + + + + + + org.apache.hadoop.yarn.applications.tensorflow.Client + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + ${java.home} + + + + + + + + diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml new file mode 100644 index 0000000000000..ab9b9725a9af0 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml @@ -0,0 +1,164 @@ + + + 4.0.0 + + org.apache.hadoop + yacop + 1.0-SNAPSHOT + + + 2.7.2 + UTF-8 + + + + org.apache.hadoop + hadoop-common + 
${hadoop.version} + + + + + junit + junit + 4.12 + test + + + org.mockito + mockito-all + 1.10.19 + test + + + + log4j + log4j + 1.2.17 + + + commons-lang + commons-lang + 2.6 + + + com.google.guava + guava + 11.0.2 + + + commons-logging + commons-logging + 1.1.3 + + + commons-cli + commons-cli + 1.2 + + + commons-io + commons-io + 2.4 + + + org.apache.hadoop + hadoop-annotations + ${hadoop.version} + + + org.apache.hadoop + hadoop-yarn-api + ${hadoop.version} + + + org.apache.hadoop + hadoop-yarn-common + ${hadoop.version} + + + org.apache.hadoop + hadoop-yarn-client + ${hadoop.version} + + + org.apache.hadoop + hadoop-yarn-registry + ${hadoop.version} + + + + org.apache.hadoop + hadoop-common + test-jar + ${hadoop.version} + test + + + org.apache.hadoop + hadoop-yarn-server-nodemanager + ${hadoop.version} + test + + + org.apache.hadoop + hadoop-yarn-server-resourcemanager + ${hadoop.version} + test + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + test + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + test + test-jar + + + org.apache.hadoop + hadoop-yarn-server-tests + test-jar + ${hadoop.version} + test + + + org.apache.curator + curator-test + 2.10.0 + test + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.6 + + + + org.apache.hadoop.yarn.applications.yacop.NClient + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.5.1 + + 1.7 + 1.7 + + + + + + diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java new file mode 100644 index 0000000000000..76caf4a77ab9f --- /dev/null +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -0,0 +1,1283 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.applications.tensorflow; + +import java.io.BufferedReader; +import java.io.DataInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.StringReader; +import java.lang.reflect.UndeclaredThrowableException; +import java.nio.ByteBuffer; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Vector; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.security.Credentials; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.util.ExitUtil; +import org.apache.hadoop.util.Shell; +import org.apache.hadoop.yarn.api.ApplicationConstants; +import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; +import org.apache.hadoop.yarn.api.ContainerManagementProtocol; +import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; +import 
org.apache.hadoop.yarn.api.records.*; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; +import org.apache.hadoop.yarn.client.api.TimelineClient; +import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync; +import org.apache.hadoop.yarn.client.api.async.NMClientAsync; +import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; +import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import org.apache.log4j.LogManager; + +import com.google.common.annotations.VisibleForTesting; +import com.sun.jersey.api.client.ClientHandlerException; + + +@InterfaceAudience.Public +@InterfaceStability.Unstable +public class ApplicationMaster { + + private static final Log LOG = LogFactory.getLog(ApplicationMaster.class); + + @VisibleForTesting + @Private + public static enum DSEvent { + DS_APP_ATTEMPT_START, DS_APP_ATTEMPT_END, DS_CONTAINER_START, DS_CONTAINER_END + } + + @VisibleForTesting + @Private + public static enum DSEntity { + DS_APP_ATTEMPT, DS_CONTAINER + } + + private static final String YARN_SHELL_ID = "YARN_SHELL_ID"; + + // Configuration + private Configuration conf; + + // Handle to communicate with the Resource Manager + @SuppressWarnings("rawtypes") + private AMRMClientAsync amRMClient; + + // In both secure and non-secure modes, this points to the job-submitter. 
+ @VisibleForTesting + UserGroupInformation appSubmitterUgi; + + // Handle to communicate with the Node Manager + private NMClientAsync nmClientAsync; + // Listen to process the response from the Node Manager + private NMCallbackHandler containerListener; + + // Application Attempt Id ( combination of attemptId and fail count ) + @VisibleForTesting + protected ApplicationAttemptId appAttemptID; + + // TODO + // For status update for clients - yet to be implemented + // Hostname of the container + private String appMasterHostname = ""; + // Port on which the app master listens for status updates from clients + private int appMasterRpcPort = -1; + // Tracking url to which app master publishes info for clients to monitor + private String appMasterTrackingUrl = ""; + + private boolean timelineServiceV2 = false; + + // App Master configuration + // No. of containers to run shell command on + @VisibleForTesting + protected int numTotalContainers = 1; + // Memory to request for the container on which the shell command will run + private long containerMemory = 10; + // VirtualCores to request for the container on which the shell command will run + private int containerVirtualCores = 1; + // Priority of the request + private int requestPriority; + + // Counter for completed containers ( complete denotes successful or failed ) + private AtomicInteger numCompletedContainers = new AtomicInteger(); + // Allocated container count so that we know how many containers has the RM + // allocated to us + @VisibleForTesting + protected AtomicInteger numAllocatedContainers = new AtomicInteger(); + // Count of failed containers + private AtomicInteger numFailedContainers = new AtomicInteger(); + // Count of containers already requested from the RM + // Needed as once requested, we should not request for containers again. + // Only request for more if the original requirement changes. 
+ @VisibleForTesting + protected AtomicInteger numRequestedContainers = new AtomicInteger(); + + // Container retry options + private ContainerRetryPolicy containerRetryPolicy = + ContainerRetryPolicy.NEVER_RETRY; + private Set containerRetryErrorCodes = null; + private int containerMaxRetries = 0; + private int containrRetryInterval = 0; + + // Timeline domain ID + private String domainId = null; + + // TF server jar file + private String tfServerJar = ""; + + // Hardcoded path to shell script in launch container's local env + private static final String EXEC_SHELL_STRING_PATH = Client.SCRIPT_PATH + + ".sh"; + private static final String EXEC_BAT_SCRIPT_STRING_PATH = Client.SCRIPT_PATH + + ".bat"; + + // Hardcoded path to custom log_properties + private static final String log4jPath = "log4j.properties"; + + private static final String shellCommandPath = "shellCommands"; + private static final String shellArgsPath = "shellArgs"; + + private volatile boolean done; + + private ByteBuffer allTokens; + + // Launch threads + private List launchThreads = new ArrayList(); + + // Timeline Client + @VisibleForTesting + TimelineClient timelineClient; + static final String CONTAINER_ENTITY_GROUP_ID = "CONTAINERS"; + static final String APPID_TIMELINE_FILTER_NAME = "appId"; + static final String USER_TIMELINE_FILTER_NAME = "user"; + + private final String linux_bash_command = "bash"; + private final String windows_command = "cmd /c"; + + private int yarnShellIdCounter = 1; + private String tfClientPy; + + private String appName; + public String getAppName() {return appName;} + + @VisibleForTesting + protected final Set launchedContainers = + Collections.newSetFromMap(new ConcurrentHashMap()); + + /** + * @param args TF server args + */ + public static void main(String[] args) { + boolean result = false; + try { + ApplicationMaster appMaster = new ApplicationMaster(); + LOG.info("Initializing ApplicationMaster"); + boolean doRun = appMaster.init(args); + if (!doRun) { + 
System.exit(0); + } + appMaster.run(); + result = appMaster.finish(); + } catch (Throwable t) { + LOG.fatal("Error running ApplicationMaster", t); + LogManager.shutdown(); + ExitUtil.terminate(1, t); + } + if (result) { + LOG.info("Application Master completed successfully. exiting"); + System.exit(0); + } else { + LOG.info("Application Master failed. exiting"); + System.exit(2); + } + } + + /** + * Dump out contents of $CWD and the environment to stdout for debugging + */ + private void dumpOutDebugInfo() { + + LOG.info("Dump debug output"); + Map envs = System.getenv(); + for (Map.Entry env : envs.entrySet()) { + LOG.info("System env: key=" + env.getKey() + ", val=" + env.getValue()); + System.out.println("System env: key=" + env.getKey() + ", val=" + + env.getValue()); + } + + BufferedReader buf = null; + try { + String lines = Shell.WINDOWS ? Shell.execCommand("cmd", "/c", "dir") : + Shell.execCommand("ls", "-al"); + buf = new BufferedReader(new StringReader(lines)); + String line = ""; + while ((line = buf.readLine()) != null) { + LOG.info("System CWD content: " + line); + System.out.println("System CWD content: " + line); + } + } catch (IOException e) { + e.printStackTrace(); + } finally { + IOUtils.cleanup(LOG, buf); + } + } + + public ApplicationMaster() { + // Set up the configuration + conf = new YarnConfiguration(); + } + + /** + * Parse command line options + * + * @param args Command line args + * @return Whether init successful and run should be invoked + * @throws ParseException + * @throws IOException + */ + public boolean init(String[] args) throws ParseException, IOException { + Options opts = new Options(); + opts.addOption("app_attempt_id", true, + "App Attempt ID. 
Not to be used unless for testing purposes"); + opts.addOption("container_memory", true, + "Amount of memory in MB to be requested to run the shell command"); + opts.addOption("container_vcores", true, + "Amount of virtual cores to be requested to run the shell command"); + opts.addOption("num_containers", true, + "No. of containers on which the shell command needs to be executed"); + opts.addOption("priority", true, "Application Priority. Default 0"); + opts.addOption("container_retry_policy", true, + "Retry policy when container fails to run, " + + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " + + "2: RETRY_ON_SPECIFIC_ERROR_CODES"); + opts.addOption("container_retry_error_codes", true, + "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " + + "codes is specified with this option, " + + "e.g. --container_retry_error_codes 1,2,3"); + opts.addOption("container_max_retries", true, + "If container could retry, it specifies max retires"); + opts.addOption("container_retry_interval", true, + "Interval between each retry, unit is milliseconds"); + opts.addOption("debug", false, "Dump out debug information"); + + opts.addOption("help", false, "Print usage"); + + opts.addOption("jar", true, "Jar file containing the tensorflow server"); + opts.addOption(Client.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); + opts.addOption(Client.OPT_TF_SERVER_JAR, true, "Provide container jar of tensorflow"); + LOG.info("print args"); + for (String arg : args) { + LOG.info(arg); + + } + CommandLine cliParser = new GnuParser().parse(opts, args); + + if (args.length == 0) { + printUsage(opts); + throw new IllegalArgumentException( + "No args specified for application master to initialize"); + } + + //Check whether customer log4j.properties file exists + if (fileExist(log4jPath)) { + try { + Log4jPropertyHelper.updateLog4jConfiguration(ApplicationMaster.class, + log4jPath); + } catch (Exception e) { + LOG.warn("Can not set up custom log4j properties. 
" + e); + } + } + + if (cliParser.hasOption("help")) { + printUsage(opts); + return false; + } + + if (cliParser.hasOption("debug")) { + dumpOutDebugInfo(); + } + + Map envs = System.getenv(); + + if (!envs.containsKey(Environment.CONTAINER_ID.name())) { + if (cliParser.hasOption("app_attempt_id")) { + String appIdStr = cliParser.getOptionValue("app_attempt_id", ""); + appAttemptID = ApplicationAttemptId.fromString(appIdStr); + } else { + throw new IllegalArgumentException( + "Application Attempt Id not set in the environment"); + } + } else { + ContainerId containerId = ContainerId.fromString(envs + .get(Environment.CONTAINER_ID.name())); + appAttemptID = containerId.getApplicationAttemptId(); + } + + if (!envs.containsKey(ApplicationConstants.APP_SUBMIT_TIME_ENV)) { + throw new RuntimeException(ApplicationConstants.APP_SUBMIT_TIME_ENV + + " not set in the environment"); + } + if (!envs.containsKey(Environment.NM_HOST.name())) { + throw new RuntimeException(Environment.NM_HOST.name() + + " not set in the environment"); + } + if (!envs.containsKey(Environment.NM_HTTP_PORT.name())) { + throw new RuntimeException(Environment.NM_HTTP_PORT + + " not set in the environment"); + } + if (!envs.containsKey(Environment.NM_PORT.name())) { + throw new RuntimeException(Environment.NM_PORT.name() + + " not set in the environment"); + } + + LOG.info("Application master for app" + ", appId=" + + appAttemptID.getApplicationId().getId() + ", clustertimestamp=" + + appAttemptID.getApplicationId().getClusterTimestamp() + + ", attemptId=" + appAttemptID.getAttemptId()); + + containerMemory = Integer.parseInt(cliParser.getOptionValue( + "container_memory", "10")); + containerVirtualCores = Integer.parseInt(cliParser.getOptionValue( + "container_vcores", "1")); + numTotalContainers = Integer.parseInt(cliParser.getOptionValue( + "num_containers", "1")); + if (numTotalContainers == 0) { + throw new IllegalArgumentException( + "Cannot run distributed shell with no containers"); + } + 
requestPriority = Integer.parseInt(cliParser + .getOptionValue("priority", "0")); + + containerRetryPolicy = ContainerRetryPolicy.values()[ + Integer.parseInt(cliParser.getOptionValue( + "container_retry_policy", "0"))]; + if (cliParser.hasOption("container_retry_error_codes")) { + containerRetryErrorCodes = new HashSet<>(); + for (String errorCode : + cliParser.getOptionValue("container_retry_error_codes").split(",")) { + containerRetryErrorCodes.add(Integer.parseInt(errorCode)); + } + } + containerMaxRetries = Integer.parseInt( + cliParser.getOptionValue("container_max_retries", "0")); + containrRetryInterval = Integer.parseInt(cliParser.getOptionValue( + "container_retry_interval", "0")); + + if (cliParser.hasOption(Client.OPT_TF_CLIENT)) { + + tfClientPy = cliParser.getOptionValue(Client.OPT_TF_CLIENT); + if (null == tfClientPy) { + LOG.error("tf_client is empty!"); + tfClientPy = TFClient.DEFAULT_TF_CLIENT_PY; + } + } + + if (cliParser.hasOption(Client.OPT_TF_SERVER_JAR)) { + tfServerJar = cliParser.getOptionValue(Client.OPT_TF_SERVER_JAR); + } + + + if (YarnConfiguration.timelineServiceEnabled(conf)) { + timelineServiceV2 = YarnConfiguration.timelineServiceV2Enabled(conf); + } else { + timelineClient = null; + LOG.warn("Timeline service is not enabled"); + } + + return true; + } + + /** + * Helper function to print usage + * + * @param opts Parsed command line options + */ + private void printUsage(Options opts) { + new HelpFormatter().printHelp("ApplicationMaster", opts); + } + + /** + * Main run function for the application master + * + * @throws YarnException + * @throws IOException + */ + @SuppressWarnings({ "unchecked" }) + public void run() throws YarnException, IOException, InterruptedException { + LOG.info("Starting ApplicationMaster"); + + // Note: Credentials, Token, UserGroupInformation, DataOutputBuffer class + // are marked as LimitedPrivate + Credentials credentials = + UserGroupInformation.getCurrentUser().getCredentials(); + DataOutputBuffer 
dob = new DataOutputBuffer(); + credentials.writeTokenStorageToStream(dob); + // Now remove the AM->RM token so that containers cannot access it. + Iterator> iter = credentials.getAllTokens().iterator(); + LOG.info("Executing with tokens:"); + while (iter.hasNext()) { + Token token = iter.next(); + LOG.info(token); + if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) { + iter.remove(); + } + } + allTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength()); + + // Create appSubmitterUgi and add original tokens to it + String appSubmitterUserName = + System.getenv(ApplicationConstants.Environment.USER.name()); + appSubmitterUgi = + UserGroupInformation.createRemoteUser(appSubmitterUserName); + appSubmitterUgi.addCredentials(credentials); + + AMRMClientAsync.AbstractCallbackHandler allocListener = + new RMCallbackHandler(); + amRMClient = AMRMClientAsync.createAMRMClientAsync(1000, allocListener); + amRMClient.init(conf); + amRMClient.start(); + + containerListener = createNMCallbackHandler(); + nmClientAsync = new NMClientAsyncImpl(containerListener); + nmClientAsync.init(conf); + nmClientAsync.start(); + + startTimelineClient(conf); + if (timelineServiceV2) { + // need to bind timelineClient + amRMClient.registerTimelineClient(timelineClient); + } + if(timelineClient != null) { + if (timelineServiceV2) { + publishApplicationAttemptEventOnTimelineServiceV2( + DSEvent.DS_APP_ATTEMPT_START); + } else { + publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), + DSEvent.DS_APP_ATTEMPT_START, domainId, appSubmitterUgi); + } + } + + // Setup local RPC Server to accept status requests directly from clients + // TODO need to setup a protocol for client to be able to communicate to + // the RPC server + // TODO use the rpc port info to register with the RM for the client to + // send requests to this app master + + // Register self with ResourceManager + // This will start heartbeating to the RM + appMasterHostname = NetUtils.getHostname(); + 
RegisterApplicationMasterResponse response = amRMClient + .registerApplicationMaster(appMasterHostname, appMasterRpcPort, + appMasterTrackingUrl); + // Dump out information about cluster capability as seen by the + // resource manager + long maxMem = response.getMaximumResourceCapability().getMemorySize(); + LOG.info("Max mem capability of resources in this cluster " + maxMem); + + int maxVCores = response.getMaximumResourceCapability().getVirtualCores(); + LOG.info("Max vcores capability of resources in this cluster " + maxVCores); + + // A resource ask cannot exceed the max. + if (containerMemory > maxMem) { + LOG.info("Container memory specified above max threshold of cluster." + + " Using max value." + ", specified=" + containerMemory + ", max=" + + maxMem); + containerMemory = maxMem; + } + + if (containerVirtualCores > maxVCores) { + LOG.info("Container virtual cores specified above max threshold of cluster." + + " Using max value." + ", specified=" + containerVirtualCores + ", max=" + + maxVCores); + containerVirtualCores = maxVCores; + } + + List previousAMRunningContainers = + response.getContainersFromPreviousAttempts(); + LOG.info(appAttemptID + " received " + previousAMRunningContainers.size() + + " previous attempts' running containers on AM registration."); + for(Container container: previousAMRunningContainers) { + launchedContainers.add(container.getId()); + } + numAllocatedContainers.addAndGet(previousAMRunningContainers.size()); + + + int numTotalContainersToRequest = + numTotalContainers - previousAMRunningContainers.size(); + // Setup ask for containers from RM + // Send request for containers to RM + // Until we get our fully allocated quota, we keep on polling RM for + // containers + // Keep looping until all the containers are launched and shell script + // executed on them ( regardless of success/failure). 
+ for (int i = 0; i < numTotalContainersToRequest; ++i) { + ContainerRequest containerAsk = setupContainerAskForRM(); + amRMClient.addContainerRequest(containerAsk); + } + numRequestedContainers.set(numTotalContainers); + + createTFClientThread(); + } + + private class TFClientThread implements Runnable { + + @Override + public void run() { + LOG.info("tensorflow client will be run!"); + TFClient tfClient = new TFClient(tfClientPy); + tfClient.run(); + } + } + + private void createTFClientThread() { + + Thread thread = new Thread(new TFClientThread()); + thread.start(); + } + + @VisibleForTesting + void startTimelineClient(final Configuration conf) + throws YarnException, IOException, InterruptedException { + try { + appSubmitterUgi.doAs(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { + if (YarnConfiguration.timelineServiceEnabled(conf)) { + // Creating the Timeline Client + if (timelineServiceV2) { + timelineClient = TimelineClient.createTimelineClient( + appAttemptID.getApplicationId()); + LOG.info("Timeline service V2 client is enabled"); + } else { + timelineClient = TimelineClient.createTimelineClient(); + LOG.info("Timeline service V1 client is enabled"); + } + timelineClient.init(conf); + timelineClient.start(); + } else { + timelineClient = null; + LOG.warn("Timeline service is not enabled"); + } + return null; + } + }); + } catch (UndeclaredThrowableException e) { + throw new YarnException(e.getCause()); + } + } + + @VisibleForTesting + NMCallbackHandler createNMCallbackHandler() { + return new NMCallbackHandler(this); + } + + @VisibleForTesting + protected boolean finish() { + // wait for completion. 
+ while (!done + && (numCompletedContainers.get() != numTotalContainers)) { + try { + Thread.sleep(200); + } catch (InterruptedException ex) {} + } + + if (timelineClient != null) { + if (timelineServiceV2) { + publishApplicationAttemptEventOnTimelineServiceV2( + DSEvent.DS_APP_ATTEMPT_END); + } else { + publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), + DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi); + } + } + + // Join all launched threads + // needed for when we time out + // and we need to release containers + for (Thread launchThread : launchThreads) { + try { + launchThread.join(10000); + } catch (InterruptedException e) { + LOG.info("Exception thrown in thread join: " + e.getMessage()); + e.printStackTrace(); + } + } + + // When the application completes, it should stop all running containers + LOG.info("Application completed. Stopping running containers"); + nmClientAsync.stop(); + + // When the application completes, it should send a finish application + // signal to the RM + LOG.info("Application completed. Signalling finish to RM"); + + FinalApplicationStatus appStatus; + String appMessage = null; + boolean success = true; + if (numCompletedContainers.get() - numFailedContainers.get() + >= numTotalContainers) { + appStatus = FinalApplicationStatus.SUCCEEDED; + } else { + appStatus = FinalApplicationStatus.FAILED; + appMessage = "Diagnostics." 
+ ", total=" + numTotalContainers + + ", completed=" + numCompletedContainers.get() + ", allocated=" + + numAllocatedContainers.get() + ", failed=" + + numFailedContainers.get(); + LOG.info(appMessage); + success = false; + } + try { + amRMClient.unregisterApplicationMaster(appStatus, appMessage, null); + } catch (YarnException ex) { + LOG.error("Failed to unregister application", ex); + } catch (IOException e) { + LOG.error("Failed to unregister application", e); + } + + amRMClient.stop(); + + // Stop Timeline Client + if(timelineClient != null) { + timelineClient.stop(); + } + + return success; + } + + @VisibleForTesting + class RMCallbackHandler extends AMRMClientAsync.AbstractCallbackHandler { + @SuppressWarnings("unchecked") + @Override + public void onContainersCompleted(List completedContainers) { + LOG.info("Got response from RM for container ask, completedCnt=" + + completedContainers.size()); + for (ContainerStatus containerStatus : completedContainers) { + LOG.info(appAttemptID + " got container status for containerID=" + + containerStatus.getContainerId() + ", state=" + + containerStatus.getState() + ", exitStatus=" + + containerStatus.getExitStatus() + ", diagnostics=" + + containerStatus.getDiagnostics()); + + // non complete containers should not be here + assert (containerStatus.getState() == ContainerState.COMPLETE); + // ignore containers we know nothing about - probably from a previous + // attempt + if (!launchedContainers.contains(containerStatus.getContainerId())) { + LOG.info("Ignoring completed status of " + + containerStatus.getContainerId() + + "; unknown container(probably launched by previous attempt)"); + continue; + } + + // increment counters for completed/failed containers + int exitStatus = containerStatus.getExitStatus(); + if (0 != exitStatus) { + // container failed + if (ContainerExitStatus.ABORTED != exitStatus) { + // shell script failed + // counts as completed + numCompletedContainers.incrementAndGet(); + 
numFailedContainers.incrementAndGet(); + } else { + // container was killed by framework, possibly preempted + // we should re-try as the container was lost for some reason + numAllocatedContainers.decrementAndGet(); + numRequestedContainers.decrementAndGet(); + // we do not need to release the container as it would be done + // by the RM + } + } else { + // nothing to do + // container completed successfully + numCompletedContainers.incrementAndGet(); + LOG.info("Container completed successfully." + ", containerId=" + + containerStatus.getContainerId()); + } + if(timelineClient != null) { + if (timelineServiceV2) { + publishContainerEndEventOnTimelineServiceV2(containerStatus); + } else { + publishContainerEndEvent( + timelineClient, containerStatus, domainId, appSubmitterUgi); + } + } + } + + // ask for more containers if any failed + int askCount = numTotalContainers - numRequestedContainers.get(); + numRequestedContainers.addAndGet(askCount); + + if (askCount > 0) { + for (int i = 0; i < askCount; ++i) { + ContainerRequest containerAsk = setupContainerAskForRM(); + amRMClient.addContainerRequest(containerAsk); + } + } + + if (numCompletedContainers.get() == numTotalContainers) { + done = true; + } + } + + @Override + public void onContainersAllocated(List allocatedContainers) { + LOG.info("Got response from RM for container ask, allocatedCnt=" + + allocatedContainers.size()); + numAllocatedContainers.addAndGet(allocatedContainers.size()); + for (Container allocatedContainer : allocatedContainers) { + String yarnShellId = Integer.toString(yarnShellIdCounter); + yarnShellIdCounter++; + LOG.info("Launching shell command on a new container." 
+ + ", containerId=" + allocatedContainer.getId() + + ", yarnShellId=" + yarnShellId + + ", containerNode=" + allocatedContainer.getNodeId().getHost() + + ":" + allocatedContainer.getNodeId().getPort() + + ", containerNodeURI=" + allocatedContainer.getNodeHttpAddress() + + ", containerResourceMemory" + + allocatedContainer.getResource().getMemorySize() + + ", containerResourceVirtualCores" + + allocatedContainer.getResource().getVirtualCores()); + // + ", containerToken" + // +allocatedContainer.getContainerToken().getIdentifier().toString()); + + Thread launchThread = createLaunchContainerThread(allocatedContainer, + yarnShellId); + + // launch and start the container on a separate thread to keep + // the main thread unblocked + // as all containers may not be allocated at one go. + launchThreads.add(launchThread); + launchedContainers.add(allocatedContainer.getId()); + launchThread.start(); + } + } + + @Override + public void onContainersUpdated( + List containers) {} + + @Override + public void onShutdownRequest() { + done = true; + } + + @Override + public void onNodesUpdated(List updatedNodes) {} + + @Override + public float getProgress() { + // set progress to deliver to RM on next heartbeat + float progress = (float) numCompletedContainers.get() + / numTotalContainers; + return progress; + } + + @Override + public void onError(Throwable e) { + LOG.error("Error in RMCallbackHandler: ", e); + done = true; + amRMClient.stop(); + } + } + + @VisibleForTesting + static class NMCallbackHandler extends NMClientAsync.AbstractCallbackHandler { + + private ConcurrentMap containers = + new ConcurrentHashMap(); + private final ApplicationMaster applicationMaster; + + public NMCallbackHandler(ApplicationMaster applicationMaster) { + this.applicationMaster = applicationMaster; + } + + public void addContainer(ContainerId containerId, Container container) { + containers.putIfAbsent(containerId, container); + } + + @Override + public void onContainerStopped(ContainerId 
containerId) { + if (LOG.isDebugEnabled()) { + LOG.debug("Succeeded to stop Container " + containerId); + } + containers.remove(containerId); + } + + @Override + public void onContainerStatusReceived(ContainerId containerId, + ContainerStatus containerStatus) { + if (LOG.isDebugEnabled()) { + LOG.debug("Container Status: id=" + containerId + ", status=" + + containerStatus); + } + } + + @Override + public void onContainerStarted(ContainerId containerId, + Map allServiceResponse) { + if (LOG.isDebugEnabled()) { + LOG.debug("Succeeded to start Container " + containerId); + } + Container container = containers.get(containerId); + if (container != null) { + applicationMaster.nmClientAsync.getContainerStatusAsync( + containerId, container.getNodeId()); + } + if(applicationMaster.timelineClient != null) { + if (applicationMaster.timelineServiceV2) { + applicationMaster.publishContainerStartEventOnTimelineServiceV2( + container); + } else { + applicationMaster.publishContainerStartEvent( + applicationMaster.timelineClient, container, + applicationMaster.domainId, applicationMaster.appSubmitterUgi); + } + } + } + + @Override + public void onContainerResourceIncreased( + ContainerId containerId, Resource resource) {} + + @Override + public void onStartContainerError(ContainerId containerId, Throwable t) { + LOG.error("Failed to start Container " + containerId); + containers.remove(containerId); + applicationMaster.numCompletedContainers.incrementAndGet(); + applicationMaster.numFailedContainers.incrementAndGet(); + } + + @Override + public void onGetContainerStatusError( + ContainerId containerId, Throwable t) { + LOG.error("Failed to query the status of Container " + containerId); + } + + @Override + public void onStopContainerError(ContainerId containerId, Throwable t) { + LOG.error("Failed to stop Container " + containerId); + containers.remove(containerId); + } + + @Override + public void onIncreaseContainerResourceError( + ContainerId containerId, Throwable t) {} + + } 
+ + /** + * Thread to connect to the {@link ContainerManagementProtocol} and launch the container + * that will execute the shell command. + */ + private class LaunchContainerRunnable implements Runnable { + + // Allocated container + private Container container; + private String shellId; + + NMCallbackHandler containerListener; + + /** + * @param lcontainer Allocated container + * @param containerListener Callback handler of the container + */ + public LaunchContainerRunnable(Container lcontainer, + NMCallbackHandler containerListener, String shellId) { + this.container = lcontainer; + this.containerListener = containerListener; + this.shellId = shellId; + } + + @Override + /** + * Connects to CM, sets up container launch context + * for shell command and eventually dispatches the container + * start request to the CM. + */ + public void run() { + LOG.info("Setting up container launch container for containerid=" + + container.getId() + " with shellid=" + shellId); + + // Set the env variables to be setup in the env where the tf servers will be run + LOG.info("Set the environment for the tf servers"); + + FileSystem fs = null; + try { + fs = FileSystem.get(conf); + } catch (IOException e) { + e.printStackTrace(); + } + + TFContainer tfContainer = new TFContainer(ApplicationMaster.this); + // Set the java environment + Map env = tfContainer.setJavaEnv(conf, tfServerJar); + + // Set the local resources + Map localResources = new HashMap(); + + ApplicationId appId = appAttemptID.getApplicationId(); + String tfServerPy = "tf_server.py"; + try { + tfContainer.addToLocalResources(fs, "/home/muzhongz/" + tfServerPy, TFContainer.SERVER_PY_PATH, appId.toString(), + localResources, null); + } catch (IOException e) { + e.printStackTrace(); + } + + StringBuilder command = tfContainer.makeCommands(containerMemory); + + List commands = new ArrayList(); + commands.add(command.toString()); + + // Set up ContainerLaunchContext, setting local resource, environment, + // command and 
token for constructor. + + // Note for tokens: Set up tokens for the container too. Today, for normal + // shell commands, the container in distribute-shell doesn't need any + // tokens. We are populating them mainly for NodeManagers to be able to + // download anyfiles in the distributed file-system. The tokens are + // otherwise also useful in cases, for e.g., when one is running a + // "hadoop dfs" command inside the distributed shell. + + ContainerRetryContext containerRetryContext = + ContainerRetryContext.newInstance( + containerRetryPolicy, containerRetryErrorCodes, + containerMaxRetries, containrRetryInterval); + LOG.info("complete container command is " + command.toString()); + ContainerLaunchContext ctx = ContainerLaunchContext.newInstance( + localResources, env, commands, null, allTokens.duplicate(), + null, containerRetryContext); + containerListener.addContainer(container.getId(), container); + nmClientAsync.startContainerAsync(container, ctx); + } + + } + + /** + * Setup the request that will be sent to the RM for the container ask. + * + * @return the setup ResourceRequest to be sent to RM + */ + private ContainerRequest setupContainerAskForRM() { + // setup requirements for hosts + // using * as any host will do for the distributed shell app + // set the priority for the request + // TODO - what is the range for priority? how to decide? 
+ Priority pri = Priority.newInstance(requestPriority); + + // Set up resource type requirements + // For now, memory and CPU are supported so we set memory and cpu requirements + Resource capability = Resource.newInstance(containerMemory, + containerVirtualCores); + + ContainerRequest request = new ContainerRequest(capability, null, null, + pri); + LOG.info("Requested container ask: " + request.toString()); + return request; + } + + private boolean fileExist(String filePath) { + return new File(filePath).exists(); + } + + private String readContent(String filePath) throws IOException { + DataInputStream ds = null; + try { + ds = new DataInputStream(new FileInputStream(filePath)); + return ds.readUTF(); + } finally { + org.apache.commons.io.IOUtils.closeQuietly(ds); + } + } + + private void publishContainerStartEvent( + final TimelineClient timelineClient, final Container container, + String domainId, UserGroupInformation ugi) { + final TimelineEntity entity = new TimelineEntity(); + entity.setEntityId(container.getId().toString()); + entity.setEntityType(DSEntity.DS_CONTAINER.toString()); + entity.setDomainId(domainId); + entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); + entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, container.getId() + .getApplicationAttemptId().getApplicationId().toString()); + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(System.currentTimeMillis()); + event.setEventType(DSEvent.DS_CONTAINER_START.toString()); + event.addEventInfo("Node", container.getNodeId().toString()); + event.addEventInfo("Resources", container.getResource().toString()); + entity.addEvent(event); + + try { + processTimelineResponseErrors( + putContainerEntity(timelineClient, + container.getId().getApplicationAttemptId(), + entity)); + } catch (YarnException | IOException | ClientHandlerException e) { + LOG.error("Container start event could not be published for " + + container.getId().toString(), e); + } + } + + 
@VisibleForTesting + void publishContainerEndEvent( + final TimelineClient timelineClient, ContainerStatus container, + String domainId, UserGroupInformation ugi) { + final TimelineEntity entity = new TimelineEntity(); + entity.setEntityId(container.getContainerId().toString()); + entity.setEntityType(DSEntity.DS_CONTAINER.toString()); + entity.setDomainId(domainId); + entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); + entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, + container.getContainerId().getApplicationAttemptId() + .getApplicationId().toString()); + TimelineEvent event = new TimelineEvent(); + event.setTimestamp(System.currentTimeMillis()); + event.setEventType(DSEvent.DS_CONTAINER_END.toString()); + event.addEventInfo("State", container.getState().name()); + event.addEventInfo("Exit Status", container.getExitStatus()); + entity.addEvent(event); + try { + processTimelineResponseErrors( + putContainerEntity(timelineClient, + container.getContainerId().getApplicationAttemptId(), + entity)); + } catch (YarnException | IOException | ClientHandlerException e) { + LOG.error("Container end event could not be published for " + + container.getContainerId().toString(), e); + } + } + + private TimelinePutResponse putContainerEntity( + TimelineClient timelineClient, ApplicationAttemptId currAttemptId, + TimelineEntity entity) + throws YarnException, IOException { + if (TimelineUtils.timelineServiceV1_5Enabled(conf)) { + TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( + currAttemptId.getApplicationId(), + CONTAINER_ENTITY_GROUP_ID); + return timelineClient.putEntities(currAttemptId, groupId, entity); + } else { + return timelineClient.putEntities(entity); + } + } + + private void publishApplicationAttemptEvent( + final TimelineClient timelineClient, String appAttemptId, + DSEvent appEvent, String domainId, UserGroupInformation ugi) { + final TimelineEntity entity = new TimelineEntity(); + entity.setEntityId(appAttemptId); 
+ entity.setEntityType(DSEntity.DS_APP_ATTEMPT.toString()); + entity.setDomainId(domainId); + entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); + TimelineEvent event = new TimelineEvent(); + event.setEventType(appEvent.toString()); + event.setTimestamp(System.currentTimeMillis()); + entity.addEvent(event); + try { + TimelinePutResponse response = timelineClient.putEntities(entity); + processTimelineResponseErrors(response); + } catch (YarnException | IOException | ClientHandlerException e) { + LOG.error("App Attempt " + + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") + + " event could not be published for " + + appAttemptID, e); + } + } + + private TimelinePutResponse processTimelineResponseErrors( + TimelinePutResponse response) { + List errors = response.getErrors(); + if (errors.size() == 0) { + LOG.debug("Timeline entities are successfully put"); + } else { + for (TimelinePutResponse.TimelinePutError error : errors) { + LOG.error( + "Error when publishing entity [" + error.getEntityType() + "," + + error.getEntityId() + "], server side error code: " + + error.getErrorCode()); + } + } + return response; + } + + RMCallbackHandler getRMCallbackHandler() { + return new RMCallbackHandler(); + } + + @VisibleForTesting + void setAmRMClient(AMRMClientAsync client) { + this.amRMClient = client; + } + + @VisibleForTesting + int getNumCompletedContainers() { + return numCompletedContainers.get(); + } + + @VisibleForTesting + boolean getDone() { + return done; + } + + @VisibleForTesting + Thread createLaunchContainerThread(Container allocatedContainer, + String shellId) { + LaunchContainerRunnable runnableLaunchContainer = + new LaunchContainerRunnable(allocatedContainer, containerListener, + shellId); + return new Thread(runnableLaunchContainer); + } + + private void publishContainerStartEventOnTimelineServiceV2( + Container container) { + final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + new 
org.apache.hadoop.yarn.api.records.timelineservice. + TimelineEntity(); + entity.setId(container.getId().toString()); + entity.setType(DSEntity.DS_CONTAINER.toString()); + long ts = System.currentTimeMillis(); + entity.setCreatedTime(ts); + entity.addInfo("user", appSubmitterUgi.getShortUserName()); + + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = + new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); + event.setTimestamp(ts); + event.setId(DSEvent.DS_CONTAINER_START.toString()); + event.addInfo("Node", container.getNodeId().toString()); + event.addInfo("Resources", container.getResource().toString()); + entity.addEvent(event); + + try { + appSubmitterUgi.doAs(new PrivilegedExceptionAction() { + @Override + public TimelinePutResponse run() throws Exception { + timelineClient.putEntities(entity); + return null; + } + }); + } catch (Exception e) { + LOG.error("Container start event could not be published for " + + container.getId().toString(), + e instanceof UndeclaredThrowableException ? e.getCause() : e); + } + } + + private void publishContainerEndEventOnTimelineServiceV2( + final ContainerStatus container) { + final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + new org.apache.hadoop.yarn.api.records.timelineservice. 
+ TimelineEntity(); + entity.setId(container.getContainerId().toString()); + entity.setType(DSEntity.DS_CONTAINER.toString()); + //entity.setDomainId(domainId); + entity.addInfo("user", appSubmitterUgi.getShortUserName()); + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = + new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); + event.setTimestamp(System.currentTimeMillis()); + event.setId(DSEvent.DS_CONTAINER_END.toString()); + event.addInfo("State", container.getState().name()); + event.addInfo("Exit Status", container.getExitStatus()); + entity.addEvent(event); + + try { + appSubmitterUgi.doAs(new PrivilegedExceptionAction() { + @Override + public TimelinePutResponse run() throws Exception { + timelineClient.putEntities(entity); + return null; + } + }); + } catch (Exception e) { + LOG.error("Container end event could not be published for " + + container.getContainerId().toString(), + e instanceof UndeclaredThrowableException ? e.getCause() : e); + } + } + + private void publishApplicationAttemptEventOnTimelineServiceV2( + DSEvent appEvent) { + final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + new org.apache.hadoop.yarn.api.records.timelineservice. 
+ TimelineEntity(); + entity.setId(appAttemptID.toString()); + entity.setType(DSEntity.DS_APP_ATTEMPT.toString()); + long ts = System.currentTimeMillis(); + if (appEvent == DSEvent.DS_APP_ATTEMPT_START) { + entity.setCreatedTime(ts); + } + entity.addInfo("user", appSubmitterUgi.getShortUserName()); + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = + new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); + event.setId(appEvent.toString()); + event.setTimestamp(ts); + entity.addEvent(event); + + try { + appSubmitterUgi.doAs(new PrivilegedExceptionAction() { + @Override + public TimelinePutResponse run() throws Exception { + timelineClient.putEntitiesAsync(entity); + return null; + } + }); + } catch (Exception e) { + LOG.error("App Attempt " + + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") + + " event could not be published for " + + appAttemptID, + e instanceof UndeclaredThrowableException ? e.getCause() : e); + } + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java new file mode 100644 index 0000000000000..b8fa744f74156 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -0,0 +1,758 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.commons.cli.*; +import org.apache.commons.io.IOUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.security.Credentials; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.yarn.api.ApplicationClientProtocol; +import org.apache.hadoop.yarn.api.ApplicationConstants; +import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; +import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; +import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; +import org.apache.hadoop.yarn.api.records.*; +import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; +import org.apache.hadoop.yarn.client.api.TimelineClient; +import org.apache.hadoop.yarn.client.api.YarnClient; +import org.apache.hadoop.yarn.client.api.YarnClientApplication; +import org.apache.hadoop.yarn.client.util.YarnClientUtils; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import 
org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.util.timeline.TimelineUtils; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.*; + +/** + * Client for Distributed Shell application submission to YARN. + * + *

The distributed shell client allows an application master to be launched that in turn would run + * the provided shell command on a set of containers.

+ * + *

This client is meant to act as an example of how to write YARN-based applications.

+ * + *

To submit an application, a client first needs to connect to the ResourceManager + * aka ApplicationsManager or ASM via the {@link ApplicationClientProtocol}. The {@link ApplicationClientProtocol} + * provides a way for the client to get access to cluster information and to request a + * new {@link ApplicationId}.

+ * + *

For the actual job submission, the client first has to create an {@link ApplicationSubmissionContext}. + * The {@link ApplicationSubmissionContext} defines the application details such as {@link ApplicationId} + * and application name, the priority assigned to the application and the queue + * to which this application needs to be assigned. In addition to this, the {@link ApplicationSubmissionContext} + * also defines the {@link ContainerLaunchContext} which describes the Container with which + * the {@link ApplicationMaster} is launched.

+ * + *

The {@link ContainerLaunchContext} in this scenario defines the resources to be allocated for the + * {@link ApplicationMaster}'s container, the local resources (jars, configuration files) to be made available + * and the environment to be set for the {@link ApplicationMaster} and the commands to be executed to run the + * {@link ApplicationMaster}.

+ * + *

Using the {@link ApplicationSubmissionContext}, the client submits the application to the + * ResourceManager and then monitors the application by requesting the ResourceManager + * for an {@link ApplicationReport} at regular time intervals. In case of the application taking too long, the client + * kills the application by submitting a {@link KillApplicationRequest} to the ResourceManager.

+ * + */ +@InterfaceAudience.Public +@InterfaceStability.Unstable +public class Client { + + private static final Log LOG = LogFactory.getLog(Client.class); + + // Configuration + private Configuration conf; + private YarnClient yarnClient; + // Application master specific info to register a new Application with RM/ASM + private String appName = ""; + public String getAppName() { + return appName; + } + // App master priority + private int amPriority = 0; + // Queue for App master + private String amQueue = ""; + // Amt. of memory resource to request for to run the App Master + private long amMemory = 100; + // Amt. of virtual core resource to request for to run the App Master + private int amVCores = 1; + + // Application master jar file + private String appMasterJar = ""; + + private String tfConatinerJar = ""; + // Main class to invoke application master + private final String appMasterMainClass; + + private String tfClientPy; + + // Amt of memory to request for container in which shell script will be executed + private int containerMemory = 10; + // Amt. of virtual cores to request for container in which shell script will be executed + private int containerVirtualCores = 1; + // No. of containers in which the shell script needs to be executed + private int numContainers = 1; + private String nodeLabelExpression = null; + + // log4j.properties file + // if available, add to local resources and set into classpath + private String log4jPropFile = ""; + + // Start time for client + private final long clientStartTime = System.currentTimeMillis(); + // Timeout threshold for client. Kill app after time interval expires. + private long clientTimeout = 600000; + + // flag to indicate whether to keep containers across application attempts. 
+ private boolean keepContainers = false; + + private long attemptFailuresValidityInterval = -1; + + private Vector containerRetryOptions = new Vector<>(5); + + // Debug flag + boolean debugFlag = false; + + // Timeline domain ID + private String domainId = null; + + // Flag to indicate whether to create the domain of the given ID + private boolean toCreateDomain = false; + + // Timeline domain reader access control + private String viewACLs = null; + + // Timeline domain writer access control + private String modifyACLs = null; + + private String flowName = null; + private String flowVersion = null; + private long flowRunId = 0L; + + // Command line options + private Options opts; + + private static final String shellCommandPath = "shellCommands"; + private static final String shellArgsPath = "shellArgs"; + private static final String appMasterJarPath = "AppMaster.jar"; + // Hardcoded path to custom log_properties + private static final String log4jPath = "log4j.properties"; + + public static final String SCRIPT_PATH = "ExecScript"; + + public static final String OPT_TF_CLIENT = "tf_client"; + public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; + + /** + * @param args Command line arguments + */ + public static void main(String[] args) { + LOG.info("start main in appmaster!"); + boolean result = false; + try { + Client client = new Client(); + LOG.info("Initializing tensorflow client"); + try { + boolean doRun = client.init(args); + if (!doRun) { + System.exit(0); + } + } catch (IllegalArgumentException e) { + System.err.println(e.getLocalizedMessage()); + client.printUsage(); + System.exit(-1); + } + result = client.run(); + } catch (Throwable t) { + LOG.fatal("Error running Client", t); + System.exit(1); + } + if (result) { + LOG.info("Application completed successfully"); + System.exit(0); + } + LOG.error("Application failed to complete successfully"); + System.exit(2); + } + + /** + */ + public Client(Configuration conf) throws Exception { + this( + 
"org.apache.hadoop.yarn.applications.tensorflow.ApplicationMaster", + conf); + } + + Client(String appMasterMainClass, Configuration conf) { + this.conf = conf; + this.appMasterMainClass = appMasterMainClass; + yarnClient = YarnClient.createYarnClient(); + yarnClient.init(conf); + opts = new Options(); + opts.addOption("appname", true, "Application Name. Default value - tensorflow"); + opts.addOption("priority", true, "Application Priority. Default 0"); + opts.addOption("queue", true, "RM Queue in which this application is to be submitted"); + opts.addOption("timeout", true, "Application timeout in milliseconds"); + opts.addOption("master_memory", true, "Amount of memory in MB to be requested to run the application master"); + opts.addOption("master_vcores", true, "Amount of virtual cores to be requested to run the application master"); + opts.addOption("jar", true, "Jar file containing the application master"); + opts.addOption("container_memory", true, "Amount of memory in MB to be requested to run a tensorflow worker"); + opts.addOption("container_vcores", true, "Amount of virtual cores to be requested to run a tensorflow worker"); + opts.addOption("num_containers", true, "No. of containers on which tensorflow workers to run"); + opts.addOption("log_properties", true, "log4j.properties file"); + opts.addOption("keep_containers_across_application_attempts", false, + "Flag to indicate whether to keep containers across application attempts." + + " If the flag is true, running containers will not be killed when" + + " application attempt fails and these containers will be retrieved by" + + " the new application attempt "); + opts.addOption("attempt_failures_validity_interval", true, + "when attempt_failures_validity_interval in milliseconds is set to > 0," + + "the failure number will not take failures which happen out of " + + "the validityInterval into failure count. 
" + + "If failure count reaches to maxAppAttempts, " + + "the application will be failed."); + opts.addOption("debug", false, "Dump out debug information"); + opts.addOption("domain", true, "ID of the timeline domain where the " + + "timeline entities will be put"); + opts.addOption("view_acls", true, "Users and groups that allowed to " + + "view the timeline entities in the given domain"); + opts.addOption("modify_acls", true, "Users and groups that allowed to " + + "modify the timeline entities in the given domain"); + opts.addOption("create", false, "Flag to indicate whether to create the " + + "domain specified with -domain."); + opts.addOption("flow_name", true, "Flow name which the distributed shell " + + "app belongs to"); + opts.addOption("flow_version", true, "Flow version which the distributed " + + "shell app belongs to"); + opts.addOption("flow_run_id", true, "Flow run ID which the distributed " + + "shell app belongs to"); + opts.addOption("help", false, "Print usage"); + opts.addOption("node_label_expression", true, + "Node label expression to determine the nodes" + + " where all the containers of this application" + + " will be allocated, \"\" means containers" + + " can be allocated anywhere, if you don't specify the option," + + " default node_label_expression of queue will be used."); + opts.addOption("container_retry_policy", true, + "Retry policy when container fails to run, " + + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " + + "2: RETRY_ON_SPECIFIC_ERROR_CODES"); + opts.addOption("container_retry_error_codes", true, + "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " + + "codes is specified with this option, " + + "e.g. 
--container_retry_error_codes 1,2,3"); + opts.addOption("container_max_retries", true, + "If container could retry, it specifies max retires"); + opts.addOption("container_retry_interval", true, + "Interval between each retry, unit is milliseconds"); + opts.addOption(OPT_TF_CLIENT, true, + "Provide client python of tensorflow"); + opts.addOption(OPT_TF_SERVER_JAR, true, + "Provide server jar of tensorflow"); + } + + /** + */ + public Client() throws Exception { + this(new YarnConfiguration()); + } + + /** + * Helper function to print out usage + */ + private void printUsage() { + new HelpFormatter().printHelp("Client", opts); + } + + /** + * Parse command line options + * @param args Parsed command line options + * @return Whether the init was successful to run the client + * @throws ParseException + */ + public boolean init(String[] args) throws ParseException { + + CommandLine cliParser = new GnuParser().parse(opts, args); + + if (args.length == 0) { + throw new IllegalArgumentException("No args specified for client to initialize"); + } + + if (cliParser.hasOption("log_properties")) { + String log4jPath = cliParser.getOptionValue("log_properties"); + try { + Log4jPropertyHelper.updateLog4jConfiguration(Client.class, log4jPath); + } catch (Exception e) { + LOG.warn("Can not set up custom log4j properties. 
" + e); + } + } + + if (cliParser.hasOption("help")) { + printUsage(); + return false; + } + + if (cliParser.hasOption("debug")) { + debugFlag = true; + + } + + if (cliParser.hasOption("keep_containers_across_application_attempts")) { + LOG.info("keep_containers_across_application_attempts"); + keepContainers = true; + } + + appName = cliParser.getOptionValue("appname", "tensorflow"); + amPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0")); + amQueue = cliParser.getOptionValue("queue", "default"); + amMemory = Integer.parseInt(cliParser.getOptionValue("master_memory", "100")); + amVCores = Integer.parseInt(cliParser.getOptionValue("master_vcores", "1")); + tfClientPy = cliParser.getOptionValue(OPT_TF_CLIENT, TFClient.DEFAULT_TF_CLIENT_PY); + tfConatinerJar = cliParser.getOptionValue(OPT_TF_SERVER_JAR); + + if (amMemory < 0) { + throw new IllegalArgumentException("Invalid memory specified for application master, exiting." + + " Specified memory=" + amMemory); + } + if (amVCores < 0) { + throw new IllegalArgumentException("Invalid virtual cores specified for application master, exiting." + + " Specified virtual cores=" + amVCores); + } + + if (!cliParser.hasOption("jar")) { + throw new IllegalArgumentException("No jar file specified for application master"); + } + + appMasterJar = cliParser.getOptionValue("jar"); + + + + if (!cliParser.hasOption(OPT_TF_CLIENT)) { + throw new IllegalArgumentException( + "No tensorflow client specified to be executed by application master"); + } else { + tfClientPy = cliParser.getOptionValue(OPT_TF_CLIENT); + } + + containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "256")); + containerVirtualCores = Integer.parseInt(cliParser.getOptionValue("container_vcores", "1")); + numContainers = Integer.parseInt(cliParser.getOptionValue("num_containers", "1")); + + + if (containerMemory < 0 || containerVirtualCores < 0 || numContainers < 1) { + throw new IllegalArgumentException("Invalid no. 
of containers or container memory/vcores specified," + + " exiting." + + " Specified containerMemory=" + containerMemory + + ", containerVirtualCores=" + containerVirtualCores + + ", numContainer=" + numContainers); + } + + nodeLabelExpression = cliParser.getOptionValue("node_label_expression", null); + + clientTimeout = Integer.parseInt(cliParser.getOptionValue("timeout", "600000")); + + attemptFailuresValidityInterval = + Long.parseLong(cliParser.getOptionValue( + "attempt_failures_validity_interval", "-1")); + + log4jPropFile = cliParser.getOptionValue("log_properties", ""); + + // Get timeline domain options + if (cliParser.hasOption("domain")) { + domainId = cliParser.getOptionValue("domain"); + toCreateDomain = cliParser.hasOption("create"); + if (cliParser.hasOption("view_acls")) { + viewACLs = cliParser.getOptionValue("view_acls"); + } + if (cliParser.hasOption("modify_acls")) { + modifyACLs = cliParser.getOptionValue("modify_acls"); + } + } + + // Get container retry options + if (cliParser.hasOption("container_retry_policy")) { + containerRetryOptions.add("--container_retry_policy " + + cliParser.getOptionValue("container_retry_policy")); + } + if (cliParser.hasOption("container_retry_error_codes")) { + containerRetryOptions.add("--container_retry_error_codes " + + cliParser.getOptionValue("container_retry_error_codes")); + } + if (cliParser.hasOption("container_max_retries")) { + containerRetryOptions.add("--container_max_retries " + + cliParser.getOptionValue("container_max_retries")); + } + if (cliParser.hasOption("container_retry_interval")) { + containerRetryOptions.add("--container_retry_interval " + + cliParser.getOptionValue("container_retry_interval")); + } + + if (cliParser.hasOption("flow_name")) { + flowName = cliParser.getOptionValue("flow_name"); + } + if (cliParser.hasOption("flow_version")) { + flowVersion = cliParser.getOptionValue("flow_version"); + } + if (cliParser.hasOption("flow_run_id")) { + try { + flowRunId = 
Long.parseLong(cliParser.getOptionValue("flow_run_id")); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "Flow run is not a valid long value", e); + } + } + return true; + } + + /** + * Main run function for the client + * @return true if application completed successfully + * @throws IOException + * @throws YarnException + */ + public boolean run() throws IOException, YarnException { + + LOG.info("Running Client"); + yarnClient.start(); + + YarnClusterMetrics clusterMetrics = yarnClient.getYarnClusterMetrics(); + LOG.info("Got Cluster metric info from ASM" + + ", numNodeManagers=" + clusterMetrics.getNumNodeManagers()); + + List clusterNodeReports = yarnClient.getNodeReports( + NodeState.RUNNING); + LOG.info("Got Cluster node info from ASM"); + for (NodeReport node : clusterNodeReports) { + LOG.info("Got node report from ASM for" + + ", nodeId=" + node.getNodeId() + + ", nodeAddress=" + node.getHttpAddress() + + ", nodeRackName=" + node.getRackName() + + ", nodeNumContainers=" + node.getNumContainers()); + } + + QueueInfo queueInfo = yarnClient.getQueueInfo(this.amQueue); + LOG.info("Queue info" + + ", queueName=" + queueInfo.getQueueName() + + ", queueCurrentCapacity=" + queueInfo.getCurrentCapacity() + + ", queueMaxCapacity=" + queueInfo.getMaximumCapacity() + + ", queueApplicationCount=" + queueInfo.getApplications().size() + + ", queueChildQueueCount=" + queueInfo.getChildQueues().size()); + + List listAclInfo = yarnClient.getQueueAclsInfo(); + for (QueueUserACLInfo aclInfo : listAclInfo) { + for (QueueACL userAcl : aclInfo.getUserAcls()) { + LOG.info("User ACL Info for Queue" + + ", queueName=" + aclInfo.getQueueName() + + ", userAcl=" + userAcl.name()); + } + } + + if (domainId != null && domainId.length() > 0 && toCreateDomain) { + prepareTimelineDomain(); + } + + // Get a new application id + YarnClientApplication app = yarnClient.createApplication(); + GetNewApplicationResponse appResponse = app.getNewApplicationResponse(); + 
// TODO get min/max resource capabilities from RM and change memory ask if needed + // If we do not have min/max, we may not be able to correctly request + // the required resources from the RM for the app master + // Memory ask has to be a multiple of min and less than max. + // Dump out information about cluster capability as seen by the resource manager + long maxMem = appResponse.getMaximumResourceCapability().getMemorySize(); + LOG.info("Max mem capability of resources in this cluster " + maxMem); + + // A resource ask cannot exceed the max. + if (amMemory > maxMem) { + LOG.info("AM memory specified above max threshold of cluster. Using max value." + + ", specified=" + amMemory + + ", max=" + maxMem); + amMemory = maxMem; + } + + int maxVCores = appResponse.getMaximumResourceCapability().getVirtualCores(); + LOG.info("Max virtual cores capability of resources in this cluster " + maxVCores); + + if (amVCores > maxVCores) { + LOG.info("AM virtual cores specified above max threshold of cluster. " + + "Using max value." 
+ ", specified=" + amVCores + + ", max=" + maxVCores); + amVCores = maxVCores; + } + + // set the application name + ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext(); + ApplicationId appId = appContext.getApplicationId(); + + appContext.setKeepContainersAcrossApplicationAttempts(keepContainers); + appContext.setApplicationName(appName); + + if (attemptFailuresValidityInterval >= 0) { + appContext + .setAttemptFailuresValidityInterval(attemptFailuresValidityInterval); + } + + Set tags = new HashSet(); + if (flowName != null) { + tags.add(TimelineUtils.generateFlowNameTag(flowName)); + } + if (flowVersion != null) { + tags.add(TimelineUtils.generateFlowVersionTag(flowVersion)); + } + if (flowRunId != 0) { + tags.add(TimelineUtils.generateFlowRunIdTag(flowRunId)); + } + appContext.setApplicationTags(tags); + + // set local resources for the application master + // local files or archives as needed + // In this scenario, the jar file for the application master is part of the local resources + Map localResources = new HashMap(); + + TFAmContainer tfAmContainer = new TFAmContainer(this); + + LOG.info("Copy App Master jar from local filesystem and add to local environment"); + // Copy the application master jar to the filesystem + // Create a local resource to point to the destination jar path + FileSystem fs = FileSystem.get(conf); + tfAmContainer.addToLocalResources(fs, appMasterJar, appMasterJarPath, appId.toString(), + localResources, null); + + // Set the log4j properties if needed + if (!log4jPropFile.isEmpty()) { + tfAmContainer.addToLocalResources(fs, log4jPropFile, log4jPath, appId.toString(), + localResources, null); + } + + // Set the necessary security tokens as needed + //amContainer.setContainerTokens(containerToken); + + // Set the env variables to be setup in the env where the application master will be run + LOG.info("Set the environment for the application master"); + Map env = tfAmContainer.setJavaEnv(conf); + + // Set the 
necessary command to execute the application master + + if (null != nodeLabelExpression) { + appContext.setNodeLabelExpression(nodeLabelExpression); + } + + + // Get final commmand + StringBuilder command = tfAmContainer.makeCommands(amMemory, appMasterMainClass, containerMemory, containerVirtualCores, + numContainers, tfClientPy, tfConatinerJar, debugFlag, containerRetryOptions); + + LOG.info("Completed setting up app master command " + command.toString()); + List commands = new ArrayList(); + commands.add(command.toString()); + + // Set up the container launch context for the application master + ContainerLaunchContext amContainer = ContainerLaunchContext.newInstance( + localResources, env, commands, null, null, null); + + // Set up resource type requirements + // For now, both memory and vcores are supported, so we set memory and + // vcores requirements + Resource capability = Resource.newInstance(amMemory, amVCores); + appContext.setResource(capability); + + // Service data is a binary blob that can be passed to the application + // Not needed in this scenario + // amContainer.setServiceData(serviceData); + + // Setup security tokens + if (UserGroupInformation.isSecurityEnabled()) { + // Note: Credentials class is marked as LimitedPrivate for HDFS and MapReduce + Credentials credentials = new Credentials(); + String tokenRenewer = YarnClientUtils.getRmPrincipal(conf); + if (tokenRenewer == null || tokenRenewer.length() == 0) { + throw new IOException( + "Can't get Master Kerberos principal for the RM to use as renewer"); + } + + // For now, only getting tokens for the default file-system. 
+ final Token tokens[] = + fs.addDelegationTokens(tokenRenewer, credentials); + if (tokens != null) { + for (Token token : tokens) { + LOG.info("Got dt for " + fs.getUri() + "; " + token); + } + } + DataOutputBuffer dob = new DataOutputBuffer(); + credentials.writeTokenStorageToStream(dob); + ByteBuffer fsTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength()); + amContainer.setTokens(fsTokens); + } + + appContext.setAMContainerSpec(amContainer); + + // Set the priority for the application master + // TODO - what is the range for priority? how to decide? + Priority pri = Priority.newInstance(amPriority); + appContext.setPriority(pri); + + // Set the queue to which this application is to be submitted in the RM + appContext.setQueue(amQueue); + + // Submit the application to the applications manager + // SubmitApplicationResponse submitResp = applicationsManager.submitApplication(appRequest); + // Ignore the response as either a valid response object is returned on success + // or an exception thrown to denote some form of a failure + LOG.info("Submitting application to ASM"); + + yarnClient.submitApplication(appContext); + + // TODO + // Try submitting the same request again + // app submission failure? + + // Monitor the application + return monitorApplication(appId); + + } + + /** + * Monitor the submitted application for completion. + * Kill application if time expires. + * @param appId Application Id of application to be monitored + * @return true if application completed successfully + * @throws YarnException + * @throws IOException + */ + private boolean monitorApplication(ApplicationId appId) + throws YarnException, IOException { + + while (true) { + + // Check app status every 1 second. 
+ try { + Thread.sleep(1000); + } catch (InterruptedException e) { + LOG.debug("Thread sleep in monitoring loop interrupted"); + } + + // Get application report for the appId we are interested in + ApplicationReport report = yarnClient.getApplicationReport(appId); + + LOG.info("Got application report from ASM for" + + ", appId=" + appId.getId() + + ", clientToAMToken=" + report.getClientToAMToken() + + ", appDiagnostics=" + report.getDiagnostics() + + ", appMasterHost=" + report.getHost() + + ", appQueue=" + report.getQueue() + + ", appMasterRpcPort=" + report.getRpcPort() + + ", appStartTime=" + report.getStartTime() + + ", yarnAppState=" + report.getYarnApplicationState().toString() + + ", tfAppFinalState=" + report.getFinalApplicationStatus().toString() + + ", appTrackingUrl=" + report.getTrackingUrl() + + ", appUser=" + report.getUser()); + + YarnApplicationState state = report.getYarnApplicationState(); + FinalApplicationStatus tfStatus = report.getFinalApplicationStatus(); + if (YarnApplicationState.FINISHED == state) { + if (FinalApplicationStatus.SUCCEEDED == tfStatus) { + LOG.info("Application has completed successfully. Breaking monitoring loop"); + return true; + } + else { + LOG.info("Application did finished unsuccessfully." + + " YarnState=" + state.toString() + ", tfAppFinalState=" + tfStatus.toString() + + ". Breaking monitoring loop"); + return false; + } + } + else if (YarnApplicationState.KILLED == state + || YarnApplicationState.FAILED == state) { + LOG.info("Application did not finish." + + " YarnState=" + state.toString() + ", tfAppFinalState=" + tfStatus.toString() + + ". Breaking monitoring loop"); + return false; + } + + if (System.currentTimeMillis() > (clientStartTime + clientTimeout)) { + LOG.info("Reached client specified timeout for application. 
Killing application"); + forceKillApplication(appId); + return false; + } + } + + } + + /** + * Kill a submitted application by sending a call to the ASM + * @param appId Application Id to be killed. + * @throws YarnException + * @throws IOException + */ + private void forceKillApplication(ApplicationId appId) + throws YarnException, IOException { + // TODO clarify whether multiple jobs with the same app id can be submitted and be running at + // the same time. + // If yes, can we kill a particular attempt only? + + // Response can be ignored as it is non-null on success or + // throws an exception in case of failures + yarnClient.killApplication(appId); + } + + private void prepareTimelineDomain() { + TimelineClient timelineClient = null; + if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) { + timelineClient = TimelineClient.createTimelineClient(); + timelineClient.init(conf); + timelineClient.start(); + } else { + LOG.warn("Cannot put the domain " + domainId + + " because the timeline service is not enabled"); + return; + } + try { + //TODO: we need to check and combine the existing timeline domain ACLs, + //but let's do it once we have client java library to query domains. + TimelineDomain domain = new TimelineDomain(); + domain.setId(domainId); + domain.setReaders( + viewACLs != null && viewACLs.length() > 0 ? viewACLs : " "); + domain.setWriters( + modifyACLs != null && modifyACLs.length() > 0 ? 
modifyACLs : " "); + timelineClient.putDomain(domain); + LOG.info("Put the timeline domain: " + + TimelineUtils.dumpTimelineRecordtoJSON(domain)); + } catch (Exception e) { + LOG.error("Error when putting the timeline domain", e); + } finally { + timelineClient.stop(); + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java new file mode 100644 index 0000000000000..4a0b75429c98e --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java @@ -0,0 +1,5 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +public class ClusterSpec { + +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java new file mode 100644 index 0000000000000..f254fdf8faabe --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * Constants used in both Client and Application Master + */ +@InterfaceAudience.Public +@InterfaceStability.Unstable +public class DSConstants { + + /** + * Environment key name pointing to the shell script's location + */ + public static final String DISTRIBUTEDSHELLSCRIPTLOCATION = "DISTRIBUTEDSHELLSCRIPTLOCATION"; + + /** + * Environment key name denoting the file timestamp for the shell script. + * Used to validate the local resource. + */ + public static final String DISTRIBUTEDSHELLSCRIPTTIMESTAMP = "DISTRIBUTEDSHELLSCRIPTTIMESTAMP"; + + /** + * Environment key name denoting the file content length for the shell script. + * Used to validate the local resource. + */ + public static final String DISTRIBUTEDSHELLSCRIPTLEN = "DISTRIBUTEDSHELLSCRIPTLEN"; + + /** + * Environment key name denoting the timeline domain ID. 
+ */ + public static final String DISTRIBUTEDSHELLTIMELINEDOMAIN = "DISTRIBUTEDSHELLTIMELINEDOMAIN"; + + public static final String APP_NAME = "tenseoflow"; +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java new file mode 100644 index 0000000000000..28cbdb344c707 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java @@ -0,0 +1,79 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId; +import org.apache.hadoop.yarn.server.timeline.NameValuePair; +import org.apache.hadoop.yarn.server.timeline.TimelineEntityGroupPlugin; +import org.apache.hadoop.yarn.util.ConverterUtils; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.SortedSet; + +/** + * Timeline v1.5 reader plugin for YARN distributed shell. It tranlsates an + * incoming getEntity request to a set of related timeline entity groups, via + * the information provided in the primary filter or entity id field. + */ +public class DistributedShellTimelinePlugin extends TimelineEntityGroupPlugin { + + @Override + public Set getTimelineEntityGroupId(String entityType, + NameValuePair primaryFilter, Collection secondaryFilters) { + if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityType)) { + if (primaryFilter == null) { + return null; + } + return toEntityGroupId(primaryFilter.getValue().toString()); + } + return null; + } + + @Override + public Set getTimelineEntityGroupId(String entityId, + String entityType) { + if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityId)) { + ContainerId containerId = ContainerId.fromString(entityId); + ApplicationId appId = containerId.getApplicationAttemptId() + .getApplicationId(); + return toEntityGroupId(appId.toString()); + } + return null; + } + + @Override + public Set getTimelineEntityGroupId(String entityType, + SortedSet entityIds, Set eventTypes) { + // Right now this method is not used by TimelineEntityGroupPlugin + return null; + } + + private Set toEntityGroupId(String strAppId) { + ApplicationId appId = ApplicationId.fromString(strAppId); + TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( + appId, 
ApplicationMaster.CONTAINER_ENTITY_GROUP_ID); + Set result = new HashSet<>(); + result.add(groupId); + return result; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Log4jPropertyHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Log4jPropertyHelper.java new file mode 100644 index 0000000000000..441185bf19b80 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Log4jPropertyHelper.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.applications.tensorflow; + +import java.io.FileInputStream; +import java.io.InputStream; +import java.util.Map.Entry; +import java.util.Properties; + +import org.apache.commons.io.IOUtils; +import org.apache.log4j.LogManager; +import org.apache.log4j.PropertyConfigurator; + + +public class Log4jPropertyHelper { + + public static void updateLog4jConfiguration(Class targetClass, + String log4jPath) throws Exception { + Properties customProperties = new Properties(); + FileInputStream fs = null; + InputStream is = null; + try { + fs = new FileInputStream(log4jPath); + is = targetClass.getResourceAsStream("/log4j.properties"); + customProperties.load(fs); + Properties originalProperties = new Properties(); + originalProperties.load(is); + for (Entry entry : customProperties.entrySet()) { + originalProperties.setProperty(entry.getKey().toString(), entry + .getValue().toString()); + } + LogManager.resetConfiguration(); + PropertyConfigurator.configure(originalProperties); + }finally { + IOUtils.closeQuietly(is); + IOUtils.closeQuietly(fs); + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java new file mode 100644 index 0000000000000..3907f92135e91 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java @@ -0,0 +1,129 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import 
org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.yarn.api.ApplicationConstants; +import org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.LocalResourceType; +import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; +import org.apache.hadoop.yarn.conf.YarnConfiguration; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Vector; + +/** + * Created by muzhongz on 16-12-15. + */ +public class TFAmContainer { + private static final Log LOG = LogFactory.getLog(TFAmContainer.class); + + private Client client; + public TFAmContainer(Client client) { + this.client = client; + } + + public void addToLocalResources(FileSystem fs, String fileSrcPath, + String fileDstPath, String appId, Map localResources, + String resources) throws IOException { + String suffix = + client.getAppName() + "/" + appId + "/" + fileDstPath; + Path dst = + new Path(fs.getHomeDirectory(), suffix); + if (fileSrcPath == null) { + FSDataOutputStream ostream = null; + try { + ostream = FileSystem + .create(fs, dst, new FsPermission((short) 0710)); + ostream.writeUTF(resources); + } finally { + IOUtils.closeQuietly(ostream); + } + } else { + fs.copyFromLocalFile(new Path(fileSrcPath), dst); + } + FileStatus scFileStatus = fs.getFileStatus(dst); + LocalResource scRsrc = + LocalResource.newInstance( + URL.fromURI(dst.toUri()), + LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, + scFileStatus.getLen(), scFileStatus.getModificationTime()); + localResources.put(fileDstPath, scRsrc); + } + + public Map setJavaEnv(Configuration conf) { + Map env = new HashMap(); + + // Add AppMaster.jar location to classpath + // At some point we should not be required to add + // the 
hadoop specific classpaths to the env. + // It should be provided out of the box. + // For now setting all required classpaths including + // the classpath to "." for the application jar + StringBuilder classPathEnv = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$$()) + .append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./*"); + for (String c : conf.getStrings( + YarnConfiguration.YARN_APPLICATION_CLASSPATH, + YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) { + classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR); + classPathEnv.append(c.trim()); + } + classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR).append( + "./log4j.properties"); + + // add the runtime classpath needed for tests to work + if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) { + classPathEnv.append(':'); + classPathEnv.append(System.getProperty("java.class.path")); + } + + env.put("CLASSPATH", classPathEnv.toString()); + return env; + } + + + public StringBuilder makeCommands(long amMemory, String appMasterMainClass, int containerMemory, int containerVirtualCores, + int numContainers, String tfClientPy, String tfConatinerJar, boolean debugFlag, Vector containerRetryOptions) { + // Set the necessary command to execute the application master + Vector vargs = new Vector(30); + + // Set java executable command + LOG.info("Setting up app master command"); + vargs.add(ApplicationConstants.Environment.JAVA_HOME.$$() + "/bin/java"); + // Set Xmx based on am memory size + vargs.add("-Xmx" + amMemory + "m"); + // Set class name + vargs.add(appMasterMainClass); + // Set params for Application Master + vargs.add("--container_memory " + String.valueOf(containerMemory)); + vargs.add("--container_vcores " + String.valueOf(containerVirtualCores)); + vargs.add("--num_containers " + String.valueOf(numContainers)); + vargs.add("--" + Client.OPT_TF_CLIENT + " " + String.valueOf(tfClientPy)); + vargs.add("--" + Client. 
OPT_TF_SERVER_JAR + " " + String.valueOf(tfConatinerJar)); + if (debugFlag) { + vargs.add("--debug"); + } + + vargs.addAll(containerRetryOptions); + + vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/AppMaster.stdout"); + vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/AppMaster.stderr"); + + // Get final commmand + StringBuilder command = new StringBuilder(); + for (CharSequence str : vargs) { + command.append(str).append(" "); + } + return command; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java new file mode 100644 index 0000000000000..3168f484db989 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -0,0 +1,29 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import java.io.IOException; + +/** + * Created by muzhongz on 16-11-30. 
+ */ +public class TFClient implements Runnable { + public static final String DEFAULT_TF_CLIENT_PY = "tf_client.py"; + private String clientPy; + + public TFClient(String tfClientName) { + + clientPy = tfClientName; + } + + @Override + public void run() { + Process process = null; + try { + process = Runtime.getRuntime().exec("python " + clientPy); + process.waitFor(); + } catch (IOException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java new file mode 100644 index 0000000000000..c6d7f926af927 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -0,0 +1,112 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.yarn.api.ApplicationConstants; +import org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.LocalResourceType; +import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.IOException; +import java.util.*; + +/** + * Created by muzhongz 
on 16-12-14. + */ +public class TFContainer { + private static final Log LOG = LogFactory.getLog(TFContainer.class); + + private String appName = DSConstants.APP_NAME; + private ApplicationMaster appMaster; + public static final String SERVER_PY_PATH = "tf_server.py"; + + public TFContainer(ApplicationMaster am) { + appMaster = am; + appName = appMaster.getAppName(); + } + + public void addToLocalResources(FileSystem fs, String fileSrcPath, + String fileDstPath, String appId, Map localResources, + String resources) throws IOException { + String suffix = + appName + "/" + appId + "/" + fileDstPath; + Path dst = + new Path(fs.getHomeDirectory(), suffix); + LOG.info( fileSrcPath + " ==> " + dst.toString()); + if (fileSrcPath == null) { + FSDataOutputStream ostream = null; + try { + ostream = FileSystem + .create(fs, dst, new FsPermission((short) 0710)); + ostream.writeUTF(resources); + } finally { + IOUtils.closeQuietly(ostream); + } + } else { + fs.copyFromLocalFile(new Path(fileSrcPath), dst); + } + FileStatus scFileStatus = fs.getFileStatus(dst); + LocalResource scRsrc = + LocalResource.newInstance( + URL.fromURI(dst.toUri()), + LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, + scFileStatus.getLen(), scFileStatus.getModificationTime()); + localResources.put(fileDstPath, scRsrc); + } + + public Map setJavaEnv(Configuration conf, String tfServerJar) { + // Set the java environment + Map env = new HashMap(); + StringBuilder classPathEnv = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$$()) + .append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./*"); + for (String c : conf.getStrings( + YarnConfiguration.YARN_APPLICATION_CLASSPATH, + YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) { + classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR); + classPathEnv.append(c.trim()); + } + classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./log4j.properties"); + + // add the runtime classpath 
needed for tests to work + if (conf.getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) { + classPathEnv.append(':'); + classPathEnv.append(System.getProperty("java.class.path")); + } + + if (tfServerJar != null) { + classPathEnv.append(ApplicationConstants.CLASS_PATH_SEPARATOR); + classPathEnv.append(tfServerJar); + } + env.put("CLASSPATH", classPathEnv.toString()); + return env; + } + + public StringBuilder makeCommands(long containerMemory) { + // Set the necessary command to execute on the allocated container + Vector vargs = new Vector(5); + vargs.add(ApplicationConstants.Environment.JAVA_HOME.$$() + "/bin/java"); + vargs.add("-Xmx" + containerMemory + "m"); + String containerClassName = TFServer.class.getName(); + vargs.add(containerClassName); + vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/TFServer." + ApplicationConstants.STDOUT); + vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/TFServer." + ApplicationConstants.STDERR); + + // Get final commmand + StringBuilder command = new StringBuilder(); + for (CharSequence str : vargs) { + command.append(str).append(" "); + } + + return command; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java new file mode 100644 index 0000000000000..b96e354aa7f87 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -0,0 +1,90 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.yarn.conf.YarnConfiguration; + +import 
java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; + +/** + * Created by muzhongz on 16-11-29. + */ +public class TFServer implements Runnable { + private static final Log LOG = LogFactory.getLog(TFServer.class); + private String tfServerPy; + private YarnConfiguration conf; + + public static void main(String[] args) { + LOG.info("start container"); + TFServer server = new TFServer("tf_server.py"); + server.startTFServer(); + } + + private void execCmd(String cmd) { + Process process = null; + try { + LOG.info("cmd is " + cmd); + process = Runtime.getRuntime().exec(cmd); + } catch (IOException e) { + LOG.fatal("cmd running failed", e); + e.printStackTrace(); + } + + try { + LOG.info("cmd log--->"); + BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); + String line; + while ((line = in.readLine()) != null) { + + LOG.info(line); + System.out.println(line); + } + in.close(); + LOG.info("<---cmd log end"); + process.waitFor(); + } catch (InterruptedException e) { + LOG.fatal("waiting error ", e); + e.printStackTrace(); + } catch (IOException e) { + LOG.info("io exception"); + e.printStackTrace(); + } + } + + @Override + public void run() { + execCmd("ls -l"); + execCmd("pwd"); + execCmd("python --version"); + String command = "python " + tfServerPy; + execCmd(command); + } + + public TFServer(String serverPy) { + this.tfServerPy = serverPy; + conf = new YarnConfiguration(); + /* + for (String c : conf.getStrings( + YarnConfiguration.YARN_APPLICATION_CLASSPATH, + YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) { + LOG.info(c); + } + */ + } + + private String address; + private int port; + private ClusterSpec clusterSpec; + + public void startTFServer() { + Thread thread = new Thread(this); + thread.start(); + } + + public int stochasticNetport() { + return 0; + } +} diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/package-info.java new file mode 100644 index 0000000000000..23a6a1e1414a1 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/package-info.java @@ -0,0 +1,19 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.applications.tensorflow; \ No newline at end of file From c723034ff9711684fe3da856cf8e822a8f593788 Mon Sep 17 00:00:00 2001 From: Zhong Muzhong Date: Mon, 26 Dec 2016 12:18:17 +0800 Subject: [PATCH 03/32] PM: rm useless pom file --- .../pom.xml | 198 ------------------ 1 file changed, 198 deletions(-) delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml deleted file mode 100644 index af94871ee16a7..0000000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml +++ /dev/null @@ -1,198 +0,0 @@ - - - - - hadoop-yarn-applications - org.apache.hadoop - 3.0.0-alpha2-SNAPSHOT - - 4.0.0 - org.apache.hadoop - hadoop-yarn-applications-tensorflow - 3.0.0-alpha2-SNAPSHOT - Apache Hadoop YARN tensorflow - - - - ${project.parent.parent.basedir} - - - - - - org.apache.hadoop - hadoop-common - provided - - - - junit - junit - test - - - - log4j - log4j - - - commons-lang - commons-lang - - - com.google.guava - guava - - - commons-logging - commons-logging - - - commons-cli - commons-cli - - - commons-io - commons-io - - - org.apache.hadoop - hadoop-yarn-server-applicationhistoryservice - - - org.apache.hadoop - hadoop-yarn-server-timelineservice - test-jar - test - - - - org.apache.hadoop - hadoop-annotations - - - - org.apache.hadoop - hadoop-common - test-jar - test - - - - org.apache.hadoop - hadoop-yarn-api - - - - org.apache.hadoop - hadoop-yarn-common - - - - org.apache.hadoop - hadoop-yarn-client - - - - org.apache.hadoop - hadoop-yarn-server-nodemanager - test - - - - org.apache.hadoop - hadoop-yarn-server-resourcemanager - test - - - - org.apache.hadoop - hadoop-yarn-server-tests - test-jar - test 
- - - org.mockito - mockito-all - test - - - org.apache.hadoop - hadoop-yarn-server-timeline-pluginstorage - - - org.apache.hadoop - hadoop-yarn-server-timeline-pluginstorage - test-jar - test - - - org.apache.hadoop - hadoop-yarn-common - test-jar - test - - - org.apache.hadoop - hadoop-hdfs - test - - - org.apache.hadoop - hadoop-hdfs - test - test-jar - - - - - - - maven-jar-plugin - - - - jar - - - test-compile - - - - - - org.apache.hadoop.yarn.applications.tensorflow.Client - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - ${java.home} - - - - - - - - From 9d0d013472e59b93e3154d3ffef8b35b92433af3 Mon Sep 17 00:00:00 2001 From: Zhong Muzhong Date: Mon, 26 Dec 2016 12:21:20 +0800 Subject: [PATCH 04/32] AM, CONTAINER: 1. add cluster spec's dynamic creation 2. change container starting sequence 3. integrate java-version Tensorflow server booting entrance 4. waiting for test --- .../pom.xml | 238 ++++++++ .../pom2.xml | 164 ------ .../tensorflow/ApplicationMaster.java | 554 +++++++++--------- .../yarn/applications/tensorflow/Client.java | 53 +- .../applications/tensorflow/ClusterSpec.java | 216 +++++++ .../tensorflow/LaunchContainerThread.java | 165 ++++++ .../tensorflow/TFAmContainer.java | 8 +- .../tensorflow/TFApplication.java | 11 + .../applications/tensorflow/TFClient.java | 71 ++- .../applications/tensorflow/TFContainer.java | 8 +- .../tensorflow/TFParamServerAddress.java | 11 + .../applications/tensorflow/TFServer.java | 87 ++- .../tensorflow/TFServerAddress.java | 60 ++ .../tensorflow/TFWorkerAddress.java | 12 + .../main/proto/tfappmaster_rpc_service.proto | 7 + 15 files changed, 1143 insertions(+), 522 deletions(-) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml create mode 100644 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/tfappmaster_rpc_service.proto diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml new file mode 100644 index 0000000000000..62f5e2c051edb --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml @@ -0,0 +1,238 @@ + + + + + hadoop-yarn-applications + org.apache.hadoop + 3.0.0-alpha2-SNAPSHOT + + 4.0.0 + org.apache.hadoop + hadoop-yarn-applications-tensorflow + 3.0.0-alpha2-SNAPSHOT + Apache Hadoop YARN tensorflow + + + + bintray + Bintray Repository + http://dl.bintray.com/fvunicorn/maven + + + + + + ${project.parent.parent.basedir} + + 1.9.13 + + + + + + org.apache.hadoop + hadoop-common + provided + + + + junit + junit + test + + + + + + org.tensorflow + java-bridge + 0.1.0 
+ + + + + org.codehaus.jackson + jackson-mapper-asl + ${jackson.version} + + + org.codehaus.jackson + jackson-core-asl + ${jackson.version} + + + org.codehaus.jackson + jackson-jaxrs + ${jackson.version} + + + commons-codec + commons-codec + compile + + + + log4j + log4j + + + commons-lang + commons-lang + + + com.google.guava + guava + + + commons-logging + commons-logging + + + commons-cli + commons-cli + + + commons-io + commons-io + + + org.apache.hadoop + hadoop-yarn-server-applicationhistoryservice + + + org.apache.hadoop + hadoop-yarn-server-timelineservice + test-jar + test + + + + org.apache.hadoop + hadoop-annotations + + + + org.apache.hadoop + hadoop-common + test-jar + test + + + + org.apache.hadoop + hadoop-yarn-api + + + + org.apache.hadoop + hadoop-yarn-common + + + + org.apache.hadoop + hadoop-yarn-client + + + + org.apache.hadoop + hadoop-yarn-server-nodemanager + test + + + + org.apache.hadoop + hadoop-yarn-server-resourcemanager + test + + + + org.apache.hadoop + hadoop-yarn-server-tests + test-jar + test + + + org.mockito + mockito-all + test + + + org.apache.hadoop + hadoop-yarn-server-timeline-pluginstorage + + + org.apache.hadoop + hadoop-yarn-server-timeline-pluginstorage + test-jar + test + + + org.apache.hadoop + hadoop-yarn-common + test-jar + test + + + org.apache.hadoop + hadoop-hdfs + test + + + org.apache.hadoop + hadoop-hdfs + test + test-jar + + + + + + + maven-jar-plugin + + + + jar + + + test-compile + + + + + + org.apache.hadoop.yarn.applications.tensorflow.Client + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + ${java.home} + + + + + + + + diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml deleted file mode 100644 index ab9b9725a9af0..0000000000000 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom2.xml +++ /dev/null @@ -1,164 +0,0 @@ - - - 4.0.0 - - org.apache.hadoop - yacop - 1.0-SNAPSHOT - - - 2.7.2 - UTF-8 - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - - - junit - junit - 4.12 - test - - - org.mockito - mockito-all - 1.10.19 - test - - - - log4j - log4j - 1.2.17 - - - commons-lang - commons-lang - 2.6 - - - com.google.guava - guava - 11.0.2 - - - commons-logging - commons-logging - 1.1.3 - - - commons-cli - commons-cli - 1.2 - - - commons-io - commons-io - 2.4 - - - org.apache.hadoop - hadoop-annotations - ${hadoop.version} - - - org.apache.hadoop - hadoop-yarn-api - ${hadoop.version} - - - org.apache.hadoop - hadoop-yarn-common - ${hadoop.version} - - - org.apache.hadoop - hadoop-yarn-client - ${hadoop.version} - - - org.apache.hadoop - hadoop-yarn-registry - ${hadoop.version} - - - - org.apache.hadoop - hadoop-common - test-jar - ${hadoop.version} - test - - - org.apache.hadoop - hadoop-yarn-server-nodemanager - ${hadoop.version} - test - - - org.apache.hadoop - hadoop-yarn-server-resourcemanager - ${hadoop.version} - test - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - test - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - test - test-jar - - - org.apache.hadoop - hadoop-yarn-server-tests - test-jar - ${hadoop.version} - test - - - org.apache.curator - curator-test - 2.10.0 - test - - - - - - - org.apache.maven.plugins - maven-jar-plugin - 2.6 - - - - org.apache.hadoop.yarn.applications.yacop.NClient - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.5.1 - - 1.7 - 1.7 - - - - - - diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 76caf4a77ab9f..57138df1b923d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -29,13 +29,11 @@ import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; @@ -51,7 +49,6 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; @@ -62,7 +59,6 @@ import org.apache.hadoop.util.Shell; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; -import org.apache.hadoop.yarn.api.ContainerManagementProtocol; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.*; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; @@ -95,7 +91,7 @@ public class ApplicationMaster { public static enum DSEvent { DS_APP_ATTEMPT_START, DS_APP_ATTEMPT_END, DS_CONTAINER_START, DS_CONTAINER_END } - + 
@VisibleForTesting @Private public static enum DSEntity { @@ -107,6 +103,10 @@ public static enum DSEntity { // Configuration private Configuration conf; + public Configuration getConfiguration() { + return conf; + } + // Handle to communicate with the Resource Manager @SuppressWarnings("rawtypes") private AMRMClientAsync amRMClient; @@ -117,6 +117,9 @@ public static enum DSEntity { // Handle to communicate with the Node Manager private NMClientAsync nmClientAsync; + public NMClientAsync getNMClientAsync() { + return nmClientAsync; + } // Listen to process the response from the Node Manager private NMCallbackHandler containerListener; @@ -124,6 +127,10 @@ public static enum DSEntity { @VisibleForTesting protected ApplicationAttemptId appAttemptID; + public ApplicationAttemptId getAppAttempId() { + return appAttemptID; + } + // TODO // For status update for clients - yet to be implemented // Hostname of the container @@ -146,6 +153,10 @@ public static enum DSEntity { // Priority of the request private int requestPriority; + private int numTotalWokerContainers = 1; + + private int numTotalParamServerContainer = 0; + // Counter for completed containers ( complete denotes successful or failed ) private AtomicInteger numCompletedContainers = new AtomicInteger(); // Allocated container count so that we know how many containers has the RM @@ -162,7 +173,7 @@ public static enum DSEntity { // Container retry options private ContainerRetryPolicy containerRetryPolicy = - ContainerRetryPolicy.NEVER_RETRY; + ContainerRetryPolicy.NEVER_RETRY; private Set containerRetryErrorCodes = null; private int containerMaxRetries = 0; private int containrRetryInterval = 0; @@ -170,14 +181,14 @@ public static enum DSEntity { // Timeline domain ID private String domainId = null; - // TF server jar file - private String tfServerJar = ""; + // TF server jar file + private String tfServerJar = ""; // Hardcoded path to shell script in launch container's local env private static final String 
EXEC_SHELL_STRING_PATH = Client.SCRIPT_PATH - + ".sh"; + + ".sh"; private static final String EXEC_BAT_SCRIPT_STRING_PATH = Client.SCRIPT_PATH - + ".bat"; + + ".bat"; // Hardcoded path to custom log_properties private static final String log4jPath = "log4j.properties"; @@ -188,6 +199,9 @@ public static enum DSEntity { private volatile boolean done; private ByteBuffer allTokens; + public ByteBuffer getAllTokens() { + return allTokens; + } // Launch threads private List launchThreads = new ArrayList(); @@ -210,7 +224,10 @@ public static enum DSEntity { @VisibleForTesting protected final Set launchedContainers = - Collections.newSetFromMap(new ConcurrentHashMap()); + Collections.newSetFromMap(new ConcurrentHashMap()); + + protected final Set allocatedContainers = + Collections.newSetFromMap(new ConcurrentHashMap()); /** * @param args TF server args @@ -250,13 +267,13 @@ private void dumpOutDebugInfo() { for (Map.Entry env : envs.entrySet()) { LOG.info("System env: key=" + env.getKey() + ", val=" + env.getValue()); System.out.println("System env: key=" + env.getKey() + ", val=" - + env.getValue()); + + env.getValue()); } BufferedReader buf = null; try { String lines = Shell.WINDOWS ? Shell.execCommand("cmd", "/c", "dir") : - Shell.execCommand("ls", "-al"); + Shell.execCommand("ls", "-al"); buf = new BufferedReader(new StringReader(lines)); String line = ""; while ((line = buf.readLine()) != null) { @@ -286,36 +303,36 @@ public ApplicationMaster() { public boolean init(String[] args) throws ParseException, IOException { Options opts = new Options(); opts.addOption("app_attempt_id", true, - "App Attempt ID. Not to be used unless for testing purposes"); + "App Attempt ID. 
Not to be used unless for testing purposes"); opts.addOption("container_memory", true, - "Amount of memory in MB to be requested to run the shell command"); + "Amount of memory in MB to be requested to run the shell command"); opts.addOption("container_vcores", true, - "Amount of virtual cores to be requested to run the shell command"); + "Amount of virtual cores to be requested to run the shell command"); opts.addOption("num_containers", true, - "No. of containers on which the shell command needs to be executed"); + "No. of containers on which the shell command needs to be executed"); opts.addOption("priority", true, "Application Priority. Default 0"); opts.addOption("container_retry_policy", true, - "Retry policy when container fails to run, " - + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " - + "2: RETRY_ON_SPECIFIC_ERROR_CODES"); + "Retry policy when container fails to run, " + + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " + + "2: RETRY_ON_SPECIFIC_ERROR_CODES"); opts.addOption("container_retry_error_codes", true, - "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " - + "codes is specified with this option, " - + "e.g. --container_retry_error_codes 1,2,3"); + "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " + + "codes is specified with this option, " + + "e.g. 
--container_retry_error_codes 1,2,3"); opts.addOption("container_max_retries", true, - "If container could retry, it specifies max retires"); + "If container could retry, it specifies max retires"); opts.addOption("container_retry_interval", true, - "Interval between each retry, unit is milliseconds"); + "Interval between each retry, unit is milliseconds"); opts.addOption("debug", false, "Dump out debug information"); opts.addOption("help", false, "Print usage"); opts.addOption("jar", true, "Jar file containing the tensorflow server"); - opts.addOption(Client.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); - opts.addOption(Client.OPT_TF_SERVER_JAR, true, "Provide container jar of tensorflow"); + opts.addOption(TFApplication.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); + opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, "Provide container jar of tensorflow"); LOG.info("print args"); for (String arg : args) { - LOG.info(arg); + LOG.info(arg); } CommandLine cliParser = new GnuParser().parse(opts, args); @@ -323,14 +340,14 @@ public boolean init(String[] args) throws ParseException, IOException { if (args.length == 0) { printUsage(opts); throw new IllegalArgumentException( - "No args specified for application master to initialize"); + "No args specified for application master to initialize"); } //Check whether customer log4j.properties file exists if (fileExist(log4jPath)) { try { Log4jPropertyHelper.updateLog4jConfiguration(ApplicationMaster.class, - log4jPath); + log4jPath); } catch (Exception e) { LOG.warn("Can not set up custom log4j properties. 
" + e); } @@ -353,77 +370,76 @@ public boolean init(String[] args) throws ParseException, IOException { appAttemptID = ApplicationAttemptId.fromString(appIdStr); } else { throw new IllegalArgumentException( - "Application Attempt Id not set in the environment"); + "Application Attempt Id not set in the environment"); } } else { ContainerId containerId = ContainerId.fromString(envs - .get(Environment.CONTAINER_ID.name())); + .get(Environment.CONTAINER_ID.name())); appAttemptID = containerId.getApplicationAttemptId(); } if (!envs.containsKey(ApplicationConstants.APP_SUBMIT_TIME_ENV)) { throw new RuntimeException(ApplicationConstants.APP_SUBMIT_TIME_ENV - + " not set in the environment"); + + " not set in the environment"); } if (!envs.containsKey(Environment.NM_HOST.name())) { throw new RuntimeException(Environment.NM_HOST.name() - + " not set in the environment"); + + " not set in the environment"); } if (!envs.containsKey(Environment.NM_HTTP_PORT.name())) { throw new RuntimeException(Environment.NM_HTTP_PORT - + " not set in the environment"); + + " not set in the environment"); } if (!envs.containsKey(Environment.NM_PORT.name())) { throw new RuntimeException(Environment.NM_PORT.name() - + " not set in the environment"); + + " not set in the environment"); } LOG.info("Application master for app" + ", appId=" - + appAttemptID.getApplicationId().getId() + ", clustertimestamp=" - + appAttemptID.getApplicationId().getClusterTimestamp() - + ", attemptId=" + appAttemptID.getAttemptId()); + + appAttemptID.getApplicationId().getId() + ", clustertimestamp=" + + appAttemptID.getApplicationId().getClusterTimestamp() + + ", attemptId=" + appAttemptID.getAttemptId()); containerMemory = Integer.parseInt(cliParser.getOptionValue( - "container_memory", "10")); + "container_memory", "10")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue( - "container_vcores", "1")); + "container_vcores", "1")); numTotalContainers = Integer.parseInt(cliParser.getOptionValue( - 
"num_containers", "1")); + "num_containers", "1")); if (numTotalContainers == 0) { throw new IllegalArgumentException( - "Cannot run distributed shell with no containers"); + "Cannot run distributed shell with no containers"); } requestPriority = Integer.parseInt(cliParser - .getOptionValue("priority", "0")); + .getOptionValue("priority", "0")); containerRetryPolicy = ContainerRetryPolicy.values()[ - Integer.parseInt(cliParser.getOptionValue( - "container_retry_policy", "0"))]; + Integer.parseInt(cliParser.getOptionValue( + "container_retry_policy", "0"))]; if (cliParser.hasOption("container_retry_error_codes")) { containerRetryErrorCodes = new HashSet<>(); for (String errorCode : - cliParser.getOptionValue("container_retry_error_codes").split(",")) { + cliParser.getOptionValue("container_retry_error_codes").split(",")) { containerRetryErrorCodes.add(Integer.parseInt(errorCode)); } } containerMaxRetries = Integer.parseInt( - cliParser.getOptionValue("container_max_retries", "0")); + cliParser.getOptionValue("container_max_retries", "0")); containrRetryInterval = Integer.parseInt(cliParser.getOptionValue( - "container_retry_interval", "0")); - - if (cliParser.hasOption(Client.OPT_TF_CLIENT)) { + "container_retry_interval", "0")); - tfClientPy = cliParser.getOptionValue(Client.OPT_TF_CLIENT); - if (null == tfClientPy) { - LOG.error("tf_client is empty!"); - tfClientPy = TFClient.DEFAULT_TF_CLIENT_PY; - } - } + if (cliParser.hasOption(TFApplication.OPT_TF_CLIENT)) { - if (cliParser.hasOption(Client.OPT_TF_SERVER_JAR)) { - tfServerJar = cliParser.getOptionValue(Client.OPT_TF_SERVER_JAR); + tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT); + if (null == tfClientPy) { + LOG.error("tf_client is empty!"); + tfClientPy = TFClient.TF_CLIENT_PY; } + } + if (cliParser.hasOption(TFApplication.OPT_TF_SERVER_JAR)) { + tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR); + } if (YarnConfiguration.timelineServiceEnabled(conf)) { 
timelineServiceV2 = YarnConfiguration.timelineServiceV2Enabled(conf); @@ -457,7 +473,7 @@ public void run() throws YarnException, IOException, InterruptedException { // Note: Credentials, Token, UserGroupInformation, DataOutputBuffer class // are marked as LimitedPrivate Credentials credentials = - UserGroupInformation.getCurrentUser().getCredentials(); + UserGroupInformation.getCurrentUser().getCredentials(); DataOutputBuffer dob = new DataOutputBuffer(); credentials.writeTokenStorageToStream(dob); // Now remove the AM->RM token so that containers cannot access it. @@ -474,13 +490,13 @@ public void run() throws YarnException, IOException, InterruptedException { // Create appSubmitterUgi and add original tokens to it String appSubmitterUserName = - System.getenv(ApplicationConstants.Environment.USER.name()); + System.getenv(ApplicationConstants.Environment.USER.name()); appSubmitterUgi = - UserGroupInformation.createRemoteUser(appSubmitterUserName); + UserGroupInformation.createRemoteUser(appSubmitterUserName); appSubmitterUgi.addCredentials(credentials); AMRMClientAsync.AbstractCallbackHandler allocListener = - new RMCallbackHandler(); + new RMCallbackHandler(); amRMClient = AMRMClientAsync.createAMRMClientAsync(1000, allocListener); amRMClient.init(conf); amRMClient.start(); @@ -498,10 +514,10 @@ public void run() throws YarnException, IOException, InterruptedException { if(timelineClient != null) { if (timelineServiceV2) { publishApplicationAttemptEventOnTimelineServiceV2( - DSEvent.DS_APP_ATTEMPT_START); + DSEvent.DS_APP_ATTEMPT_START); } else { publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), - DSEvent.DS_APP_ATTEMPT_START, domainId, appSubmitterUgi); + DSEvent.DS_APP_ATTEMPT_START, domainId, appSubmitterUgi); } } @@ -515,35 +531,35 @@ public void run() throws YarnException, IOException, InterruptedException { // This will start heartbeating to the RM appMasterHostname = NetUtils.getHostname(); RegisterApplicationMasterResponse response 
= amRMClient - .registerApplicationMaster(appMasterHostname, appMasterRpcPort, - appMasterTrackingUrl); + .registerApplicationMaster(appMasterHostname, appMasterRpcPort, + appMasterTrackingUrl); // Dump out information about cluster capability as seen by the // resource manager long maxMem = response.getMaximumResourceCapability().getMemorySize(); LOG.info("Max mem capability of resources in this cluster " + maxMem); - + int maxVCores = response.getMaximumResourceCapability().getVirtualCores(); LOG.info("Max vcores capability of resources in this cluster " + maxVCores); // A resource ask cannot exceed the max. if (containerMemory > maxMem) { LOG.info("Container memory specified above max threshold of cluster." - + " Using max value." + ", specified=" + containerMemory + ", max=" - + maxMem); + + " Using max value." + ", specified=" + containerMemory + ", max=" + + maxMem); containerMemory = maxMem; } if (containerVirtualCores > maxVCores) { LOG.info("Container virtual cores specified above max threshold of cluster." - + " Using max value." + ", specified=" + containerVirtualCores + ", max=" - + maxVCores); + + " Using max value." 
+ ", specified=" + containerVirtualCores + ", max=" + + maxVCores); containerVirtualCores = maxVCores; } List previousAMRunningContainers = - response.getContainersFromPreviousAttempts(); + response.getContainersFromPreviousAttempts(); LOG.info(appAttemptID + " received " + previousAMRunningContainers.size() - + " previous attempts' running containers on AM registration."); + + " previous attempts' running containers on AM registration."); for(Container container: previousAMRunningContainers) { launchedContainers.add(container.getId()); } @@ -551,7 +567,7 @@ public void run() throws YarnException, IOException, InterruptedException { int numTotalContainersToRequest = - numTotalContainers - previousAMRunningContainers.size(); + numTotalContainers - previousAMRunningContainers.size(); // Setup ask for containers from RM // Send request for containers to RM // Until we get our fully allocated quota, we keep on polling RM for @@ -564,28 +580,25 @@ public void run() throws YarnException, IOException, InterruptedException { } numRequestedContainers.set(numTotalContainers); - createTFClientThread(); + //createTFClientThread(); } - private class TFClientThread implements Runnable { + private void createTFClientThread() { + Thread thread = new Thread(new Runnable() { @Override public void run() { LOG.info("tensorflow client will be run!"); - TFClient tfClient = new TFClient(tfClientPy); - tfClient.run(); + TFClient tfClient = new TFClient(TFClient.TF_CLIENT_PY); + tfClient.run(); } - } - - private void createTFClientThread() { - - Thread thread = new Thread(new TFClientThread()); - thread.start(); + }); + thread.start(); } @VisibleForTesting void startTimelineClient(final Configuration conf) - throws YarnException, IOException, InterruptedException { + throws YarnException, IOException, InterruptedException { try { appSubmitterUgi.doAs(new PrivilegedExceptionAction() { @Override @@ -594,7 +607,7 @@ public Void run() throws Exception { // Creating the Timeline Client if 
(timelineServiceV2) { timelineClient = TimelineClient.createTimelineClient( - appAttemptID.getApplicationId()); + appAttemptID.getApplicationId()); LOG.info("Timeline service V2 client is enabled"); } else { timelineClient = TimelineClient.createTimelineClient(); @@ -623,7 +636,7 @@ NMCallbackHandler createNMCallbackHandler() { protected boolean finish() { // wait for completion. while (!done - && (numCompletedContainers.get() != numTotalContainers)) { + && (numCompletedContainers.get() != numTotalContainers)) { try { Thread.sleep(200); } catch (InterruptedException ex) {} @@ -632,10 +645,10 @@ protected boolean finish() { if (timelineClient != null) { if (timelineServiceV2) { publishApplicationAttemptEventOnTimelineServiceV2( - DSEvent.DS_APP_ATTEMPT_END); + DSEvent.DS_APP_ATTEMPT_END); } else { publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), - DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi); + DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi); } } @@ -662,15 +675,16 @@ protected boolean finish() { FinalApplicationStatus appStatus; String appMessage = null; boolean success = true; + LOG.info(">>>>>>>>>>>>>>>>>>>>>numCompletedContainers: " + numCompletedContainers.get() + "numFailedContainers: " + numFailedContainers.get() + "total " + numTotalContainers); if (numCompletedContainers.get() - numFailedContainers.get() - >= numTotalContainers) { + >= numTotalContainers) { appStatus = FinalApplicationStatus.SUCCEEDED; } else { appStatus = FinalApplicationStatus.FAILED; appMessage = "Diagnostics." 
+ ", total=" + numTotalContainers - + ", completed=" + numCompletedContainers.get() + ", allocated=" - + numAllocatedContainers.get() + ", failed=" - + numFailedContainers.get(); + + ", completed=" + numCompletedContainers.get() + ", allocated=" + + numAllocatedContainers.get() + ", failed=" + + numFailedContainers.get(); LOG.info(appMessage); success = false; } @@ -681,7 +695,7 @@ protected boolean finish() { } catch (IOException e) { LOG.error("Failed to unregister application", e); } - + amRMClient.stop(); // Stop Timeline Client @@ -692,19 +706,90 @@ protected boolean finish() { return success; } + public boolean startAllContainers() throws Exception { + if (numAllocatedContainers.get() == numTotalContainers) { + + ClusterSpec clusterSpec = ClusterSpec.makeClusterSpec(); + int numWorkerContainers = 0; + int numPsContainers = 0; + if (this.allocatedContainers.size() < numTotalWokerContainers + numTotalParamServerContainer) { + LOG.error("not enough ps and woker containers allocated!"); + return false; + } + + for (Container allocatedContainer : this.allocatedContainers) { + if (numWorkerContainers < numTotalWokerContainers) { + LOG.info("work cid: " + allocatedContainer.getId().toString()); + clusterSpec.addWorkerSpec(allocatedContainer.getId().toString(), allocatedContainer.getNodeId().getHost()); + numWorkerContainers++; + continue; + } + + if (numPsContainers < this.numTotalParamServerContainer) { + LOG.info("ps cid: " + allocatedContainer.getId().toString()); + clusterSpec.addPsSpec(allocatedContainer.getId().toString(), allocatedContainer.getNodeId().getHost()); + numPsContainers++; + } + + } + + for (Container allocatedContainer : this.allocatedContainers) { + + String yarnShellId = Integer.toString(yarnShellIdCounter); + yarnShellIdCounter++; + LOG.info("Launching shell command on a new container." 
+ + ", containerId=" + allocatedContainer.getId() + + ", yarnShellId=" + yarnShellId + + ", containerNode=" + allocatedContainer.getNodeId().getHost() + + ":" + allocatedContainer.getNodeId().getPort() + + ", containerNodeURI=" + allocatedContainer.getNodeHttpAddress() + + ", containerResourceMemory" + + allocatedContainer.getResource().getMemorySize() + + ", containerResourceVirtualCores" + + allocatedContainer.getResource().getVirtualCores()); + // + ", containerToken" + // +allocatedContainer.getContainerToken().getIdentifier().toString()); + + LOG.info("server cid: " + allocatedContainer.getId().toString()); + LaunchContainerThread runnable = new LaunchContainerThread(allocatedContainer, + yarnShellId, + tfServerJar, + containerMemory, + containerRetryPolicy, + containerRetryErrorCodes, + containerMaxRetries, + containrRetryInterval, + this, + containerListener, clusterSpec.getServerAddress(allocatedContainer.getId().toString())); + Thread launchThread = new Thread(runnable); + + // launch and start the container on a separate thread to keep + // the main thread unblocked + // as all containers may not be allocated at one go. 
+ launchThreads.add(launchThread); + launchedContainers.add(allocatedContainer.getId()); + launchThread.start(); + return true; + } + } else { + throw new Exception("containers are not allocated!"); + } + return false; + } + @VisibleForTesting class RMCallbackHandler extends AMRMClientAsync.AbstractCallbackHandler { @SuppressWarnings("unchecked") @Override public void onContainersCompleted(List completedContainers) { LOG.info("Got response from RM for container ask, completedCnt=" - + completedContainers.size()); + + completedContainers.size()); for (ContainerStatus containerStatus : completedContainers) { LOG.info(appAttemptID + " got container status for containerID=" - + containerStatus.getContainerId() + ", state=" - + containerStatus.getState() + ", exitStatus=" - + containerStatus.getExitStatus() + ", diagnostics=" - + containerStatus.getDiagnostics()); + + containerStatus.getContainerId() + ", state=" + + containerStatus.getState() + ", exitStatus=" + + containerStatus.getExitStatus() + ", diagnostics=" + + containerStatus.getDiagnostics()); // non complete containers should not be here assert (containerStatus.getState() == ContainerState.COMPLETE); @@ -712,8 +797,8 @@ public void onContainersCompleted(List completedContainers) { // attempt if (!launchedContainers.contains(containerStatus.getContainerId())) { LOG.info("Ignoring completed status of " - + containerStatus.getContainerId() - + "; unknown container(probably launched by previous attempt)"); + + containerStatus.getContainerId() + + "; unknown container(probably launched by previous attempt)"); continue; } @@ -739,18 +824,18 @@ public void onContainersCompleted(List completedContainers) { // container completed successfully numCompletedContainers.incrementAndGet(); LOG.info("Container completed successfully." 
+ ", containerId=" - + containerStatus.getContainerId()); + + containerStatus.getContainerId()); } if(timelineClient != null) { if (timelineServiceV2) { publishContainerEndEventOnTimelineServiceV2(containerStatus); } else { publishContainerEndEvent( - timelineClient, containerStatus, domainId, appSubmitterUgi); + timelineClient, containerStatus, domainId, appSubmitterUgi); } } } - + // ask for more containers if any failed int askCount = numTotalContainers - numRequestedContainers.get(); numRequestedContainers.addAndGet(askCount); @@ -761,7 +846,7 @@ public void onContainersCompleted(List completedContainers) { amRMClient.addContainerRequest(containerAsk); } } - + if (numCompletedContainers.get() == numTotalContainers) { done = true; } @@ -770,39 +855,21 @@ public void onContainersCompleted(List completedContainers) { @Override public void onContainersAllocated(List allocatedContainers) { LOG.info("Got response from RM for container ask, allocatedCnt=" - + allocatedContainers.size()); + + allocatedContainers.size()); numAllocatedContainers.addAndGet(allocatedContainers.size()); - for (Container allocatedContainer : allocatedContainers) { - String yarnShellId = Integer.toString(yarnShellIdCounter); - yarnShellIdCounter++; - LOG.info("Launching shell command on a new container." 
- + ", containerId=" + allocatedContainer.getId() - + ", yarnShellId=" + yarnShellId - + ", containerNode=" + allocatedContainer.getNodeId().getHost() - + ":" + allocatedContainer.getNodeId().getPort() - + ", containerNodeURI=" + allocatedContainer.getNodeHttpAddress() - + ", containerResourceMemory" - + allocatedContainer.getResource().getMemorySize() - + ", containerResourceVirtualCores" - + allocatedContainer.getResource().getVirtualCores()); - // + ", containerToken" - // +allocatedContainer.getContainerToken().getIdentifier().toString()); - - Thread launchThread = createLaunchContainerThread(allocatedContainer, - yarnShellId); - - // launch and start the container on a separate thread to keep - // the main thread unblocked - // as all containers may not be allocated at one go. - launchThreads.add(launchThread); - launchedContainers.add(allocatedContainer.getId()); - launchThread.start(); + ApplicationMaster.this.allocatedContainers.addAll(allocatedContainers); + if (numAllocatedContainers.get() == numTotalContainers) { + try { + startAllContainers(); + } catch (Exception e) { + e.printStackTrace(); + } } } @Override public void onContainersUpdated( - List containers) {} + List containers) {} @Override public void onShutdownRequest() { @@ -816,7 +883,7 @@ public void onNodesUpdated(List updatedNodes) {} public float getProgress() { // set progress to deliver to RM on next heartbeat float progress = (float) numCompletedContainers.get() - / numTotalContainers; + / numTotalContainers; return progress; } @@ -832,7 +899,7 @@ public void onError(Throwable e) { static class NMCallbackHandler extends NMClientAsync.AbstractCallbackHandler { private ConcurrentMap containers = - new ConcurrentHashMap(); + new ConcurrentHashMap(); private final ApplicationMaster applicationMaster; public NMCallbackHandler(ApplicationMaster applicationMaster) { @@ -853,39 +920,39 @@ public void onContainerStopped(ContainerId containerId) { @Override public void 
onContainerStatusReceived(ContainerId containerId, - ContainerStatus containerStatus) { + ContainerStatus containerStatus) { if (LOG.isDebugEnabled()) { LOG.debug("Container Status: id=" + containerId + ", status=" + - containerStatus); + containerStatus); } } @Override public void onContainerStarted(ContainerId containerId, - Map allServiceResponse) { + Map allServiceResponse) { if (LOG.isDebugEnabled()) { LOG.debug("Succeeded to start Container " + containerId); } Container container = containers.get(containerId); if (container != null) { applicationMaster.nmClientAsync.getContainerStatusAsync( - containerId, container.getNodeId()); + containerId, container.getNodeId()); } if(applicationMaster.timelineClient != null) { if (applicationMaster.timelineServiceV2) { applicationMaster.publishContainerStartEventOnTimelineServiceV2( - container); + container); } else { applicationMaster.publishContainerStartEvent( - applicationMaster.timelineClient, container, - applicationMaster.domainId, applicationMaster.appSubmitterUgi); + applicationMaster.timelineClient, container, + applicationMaster.domainId, applicationMaster.appSubmitterUgi); } } } @Override public void onContainerResourceIncreased( - ContainerId containerId, Resource resource) {} + ContainerId containerId, Resource resource) {} @Override public void onStartContainerError(ContainerId containerId, Throwable t) { @@ -897,7 +964,7 @@ public void onStartContainerError(ContainerId containerId, Throwable t) { @Override public void onGetContainerStatusError( - ContainerId containerId, Throwable t) { + ContainerId containerId, Throwable t) { LOG.error("Failed to query the status of Container " + containerId); } @@ -909,97 +976,11 @@ public void onStopContainerError(ContainerId containerId, Throwable t) { @Override public void onIncreaseContainerResourceError( - ContainerId containerId, Throwable t) {} + ContainerId containerId, Throwable t) {} } - /** - * Thread to connect to the {@link ContainerManagementProtocol} and 
launch the container - * that will execute the shell command. - */ - private class LaunchContainerRunnable implements Runnable { - - // Allocated container - private Container container; - private String shellId; - - NMCallbackHandler containerListener; - - /** - * @param lcontainer Allocated container - * @param containerListener Callback handler of the container - */ - public LaunchContainerRunnable(Container lcontainer, - NMCallbackHandler containerListener, String shellId) { - this.container = lcontainer; - this.containerListener = containerListener; - this.shellId = shellId; - } - @Override - /** - * Connects to CM, sets up container launch context - * for shell command and eventually dispatches the container - * start request to the CM. - */ - public void run() { - LOG.info("Setting up container launch container for containerid=" - + container.getId() + " with shellid=" + shellId); - - // Set the env variables to be setup in the env where the tf servers will be run - LOG.info("Set the environment for the tf servers"); - - FileSystem fs = null; - try { - fs = FileSystem.get(conf); - } catch (IOException e) { - e.printStackTrace(); - } - - TFContainer tfContainer = new TFContainer(ApplicationMaster.this); - // Set the java environment - Map env = tfContainer.setJavaEnv(conf, tfServerJar); - - // Set the local resources - Map localResources = new HashMap(); - - ApplicationId appId = appAttemptID.getApplicationId(); - String tfServerPy = "tf_server.py"; - try { - tfContainer.addToLocalResources(fs, "/home/muzhongz/" + tfServerPy, TFContainer.SERVER_PY_PATH, appId.toString(), - localResources, null); - } catch (IOException e) { - e.printStackTrace(); - } - - StringBuilder command = tfContainer.makeCommands(containerMemory); - - List commands = new ArrayList(); - commands.add(command.toString()); - - // Set up ContainerLaunchContext, setting local resource, environment, - // command and token for constructor. 
- - // Note for tokens: Set up tokens for the container too. Today, for normal - // shell commands, the container in distribute-shell doesn't need any - // tokens. We are populating them mainly for NodeManagers to be able to - // download anyfiles in the distributed file-system. The tokens are - // otherwise also useful in cases, for e.g., when one is running a - // "hadoop dfs" command inside the distributed shell. - - ContainerRetryContext containerRetryContext = - ContainerRetryContext.newInstance( - containerRetryPolicy, containerRetryErrorCodes, - containerMaxRetries, containrRetryInterval); - LOG.info("complete container command is " + command.toString()); - ContainerLaunchContext ctx = ContainerLaunchContext.newInstance( - localResources, env, commands, null, allTokens.duplicate(), - null, containerRetryContext); - containerListener.addContainer(container.getId(), container); - nmClientAsync.startContainerAsync(container, ctx); - } - - } /** * Setup the request that will be sent to the RM for the container ask. 
@@ -1016,10 +997,10 @@ private ContainerRequest setupContainerAskForRM() { // Set up resource type requirements // For now, memory and CPU are supported so we set memory and cpu requirements Resource capability = Resource.newInstance(containerMemory, - containerVirtualCores); + containerVirtualCores); ContainerRequest request = new ContainerRequest(capability, null, null, - pri); + pri); LOG.info("Requested container ask: " + request.toString()); return request; } @@ -1039,15 +1020,15 @@ private String readContent(String filePath) throws IOException { } private void publishContainerStartEvent( - final TimelineClient timelineClient, final Container container, - String domainId, UserGroupInformation ugi) { + final TimelineClient timelineClient, final Container container, + String domainId, UserGroupInformation ugi) { final TimelineEntity entity = new TimelineEntity(); entity.setEntityId(container.getId().toString()); entity.setEntityType(DSEntity.DS_CONTAINER.toString()); entity.setDomainId(domainId); entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, container.getId() - .getApplicationAttemptId().getApplicationId().toString()); + .getApplicationAttemptId().getApplicationId().toString()); TimelineEvent event = new TimelineEvent(); event.setTimestamp(System.currentTimeMillis()); event.setEventType(DSEvent.DS_CONTAINER_START.toString()); @@ -1057,27 +1038,27 @@ private void publishContainerStartEvent( try { processTimelineResponseErrors( - putContainerEntity(timelineClient, - container.getId().getApplicationAttemptId(), - entity)); + putContainerEntity(timelineClient, + container.getId().getApplicationAttemptId(), + entity)); } catch (YarnException | IOException | ClientHandlerException e) { LOG.error("Container start event could not be published for " - + container.getId().toString(), e); + + container.getId().toString(), e); } } @VisibleForTesting void publishContainerEndEvent( - final 
TimelineClient timelineClient, ContainerStatus container, - String domainId, UserGroupInformation ugi) { + final TimelineClient timelineClient, ContainerStatus container, + String domainId, UserGroupInformation ugi) { final TimelineEntity entity = new TimelineEntity(); entity.setEntityId(container.getContainerId().toString()); entity.setEntityType(DSEntity.DS_CONTAINER.toString()); entity.setDomainId(domainId); entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, - container.getContainerId().getApplicationAttemptId() - .getApplicationId().toString()); + container.getContainerId().getApplicationAttemptId() + .getApplicationId().toString()); TimelineEvent event = new TimelineEvent(); event.setTimestamp(System.currentTimeMillis()); event.setEventType(DSEvent.DS_CONTAINER_END.toString()); @@ -1086,23 +1067,23 @@ void publishContainerEndEvent( entity.addEvent(event); try { processTimelineResponseErrors( - putContainerEntity(timelineClient, - container.getContainerId().getApplicationAttemptId(), - entity)); + putContainerEntity(timelineClient, + container.getContainerId().getApplicationAttemptId(), + entity)); } catch (YarnException | IOException | ClientHandlerException e) { LOG.error("Container end event could not be published for " - + container.getContainerId().toString(), e); + + container.getContainerId().toString(), e); } } private TimelinePutResponse putContainerEntity( - TimelineClient timelineClient, ApplicationAttemptId currAttemptId, - TimelineEntity entity) - throws YarnException, IOException { + TimelineClient timelineClient, ApplicationAttemptId currAttemptId, + TimelineEntity entity) + throws YarnException, IOException { if (TimelineUtils.timelineServiceV1_5Enabled(conf)) { TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( - currAttemptId.getApplicationId(), - CONTAINER_ENTITY_GROUP_ID); + currAttemptId.getApplicationId(), + CONTAINER_ENTITY_GROUP_ID); return 
timelineClient.putEntities(currAttemptId, groupId, entity); } else { return timelineClient.putEntities(entity); @@ -1110,8 +1091,8 @@ private TimelinePutResponse putContainerEntity( } private void publishApplicationAttemptEvent( - final TimelineClient timelineClient, String appAttemptId, - DSEvent appEvent, String domainId, UserGroupInformation ugi) { + final TimelineClient timelineClient, String appAttemptId, + DSEvent appEvent, String domainId, UserGroupInformation ugi) { final TimelineEntity entity = new TimelineEntity(); entity.setEntityId(appAttemptId); entity.setEntityType(DSEntity.DS_APP_ATTEMPT.toString()); @@ -1126,23 +1107,23 @@ private void publishApplicationAttemptEvent( processTimelineResponseErrors(response); } catch (YarnException | IOException | ClientHandlerException e) { LOG.error("App Attempt " - + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") - + " event could not be published for " - + appAttemptID, e); + + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? 
"start" : "end") + + " event could not be published for " + + appAttemptID, e); } } private TimelinePutResponse processTimelineResponseErrors( - TimelinePutResponse response) { + TimelinePutResponse response) { List errors = response.getErrors(); if (errors.size() == 0) { LOG.debug("Timeline entities are successfully put"); } else { for (TimelinePutResponse.TimelinePutError error : errors) { LOG.error( - "Error when publishing entity [" + error.getEntityType() + "," - + error.getEntityId() + "], server side error code: " - + error.getErrorCode()); + "Error when publishing entity [" + error.getEntityType() + "," + + error.getEntityId() + "], server side error code: " + + error.getErrorCode()); } } return response; @@ -1167,21 +1148,12 @@ boolean getDone() { return done; } - @VisibleForTesting - Thread createLaunchContainerThread(Container allocatedContainer, - String shellId) { - LaunchContainerRunnable runnableLaunchContainer = - new LaunchContainerRunnable(allocatedContainer, containerListener, - shellId); - return new Thread(runnableLaunchContainer); - } - private void publishContainerStartEventOnTimelineServiceV2( - Container container) { + Container container) { final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity - entity = + entity = new org.apache.hadoop.yarn.api.records.timelineservice. 
- TimelineEntity(); + TimelineEntity(); entity.setId(container.getId().toString()); entity.setType(DSEntity.DS_CONTAINER.toString()); long ts = System.currentTimeMillis(); @@ -1189,7 +1161,7 @@ private void publishContainerStartEventOnTimelineServiceV2( entity.addInfo("user", appSubmitterUgi.getShortUserName()); org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); + new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); event.setTimestamp(ts); event.setId(DSEvent.DS_CONTAINER_START.toString()); event.addInfo("Node", container.getNodeId().toString()); @@ -1206,23 +1178,23 @@ public TimelinePutResponse run() throws Exception { }); } catch (Exception e) { LOG.error("Container start event could not be published for " - + container.getId().toString(), - e instanceof UndeclaredThrowableException ? e.getCause() : e); + + container.getId().toString(), + e instanceof UndeclaredThrowableException ? e.getCause() : e); } } private void publishContainerEndEventOnTimelineServiceV2( - final ContainerStatus container) { + final ContainerStatus container) { final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity - entity = + entity = new org.apache.hadoop.yarn.api.records.timelineservice. 
- TimelineEntity(); + TimelineEntity(); entity.setId(container.getContainerId().toString()); entity.setType(DSEntity.DS_CONTAINER.toString()); //entity.setDomainId(domainId); entity.addInfo("user", appSubmitterUgi.getShortUserName()); org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); + new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); event.setTimestamp(System.currentTimeMillis()); event.setId(DSEvent.DS_CONTAINER_END.toString()); event.addInfo("State", container.getState().name()); @@ -1239,17 +1211,17 @@ public TimelinePutResponse run() throws Exception { }); } catch (Exception e) { LOG.error("Container end event could not be published for " - + container.getContainerId().toString(), - e instanceof UndeclaredThrowableException ? e.getCause() : e); + + container.getContainerId().toString(), + e instanceof UndeclaredThrowableException ? e.getCause() : e); } } private void publishApplicationAttemptEventOnTimelineServiceV2( - DSEvent appEvent) { + DSEvent appEvent) { final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity - entity = + entity = new org.apache.hadoop.yarn.api.records.timelineservice. 
- TimelineEntity(); + TimelineEntity(); entity.setId(appAttemptID.toString()); entity.setType(DSEntity.DS_APP_ATTEMPT.toString()); long ts = System.currentTimeMillis(); @@ -1258,7 +1230,7 @@ private void publishApplicationAttemptEventOnTimelineServiceV2( } entity.addInfo("user", appSubmitterUgi.getShortUserName()); org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); + new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); event.setId(appEvent.toString()); event.setTimestamp(ts); entity.addEvent(event); @@ -1273,10 +1245,10 @@ public TimelinePutResponse run() throws Exception { }); } catch (Exception e) { LOG.error("App Attempt " - + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") - + " event could not be published for " - + appAttemptID, - e instanceof UndeclaredThrowableException ? e.getCause() : e); + + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") + + " event could not be published for " + + appAttemptID, + e instanceof UndeclaredThrowableException ? 
e.getCause() : e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index b8fa744f74156..b68dc36eea872 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -19,24 +19,17 @@ package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.commons.cli.*; -import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.api.ApplicationClientProtocol; -import org.apache.hadoop.yarn.api.ApplicationConstants; -import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.records.*; @@ -111,6 +104,8 @@ public String getAppName() { 
private String appMasterJar = ""; private String tfConatinerJar = ""; + + private String tfServerPy = ""; // Main class to invoke application master private final String appMasterMainClass; @@ -133,9 +128,6 @@ public String getAppName() { // Timeout threshold for client. Kill app after time interval expires. private long clientTimeout = 600000; - // flag to indicate whether to keep containers across application attempts. - private boolean keepContainers = false; - private long attemptFailuresValidityInterval = -1; private Vector containerRetryOptions = new Vector<>(5); @@ -170,8 +162,8 @@ public String getAppName() { public static final String SCRIPT_PATH = "ExecScript"; - public static final String OPT_TF_CLIENT = "tf_client"; - public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; + //public static final String OPT_TF_CLIENT = "tf_client"; + //public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; /** * @param args Command line arguments @@ -230,11 +222,6 @@ public Client(Configuration conf) throws Exception { opts.addOption("container_vcores", true, "Amount of virtual cores to be requested to run a tensorflow worker"); opts.addOption("num_containers", true, "No. of containers on which tensorflow workers to run"); opts.addOption("log_properties", true, "log4j.properties file"); - opts.addOption("keep_containers_across_application_attempts", false, - "Flag to indicate whether to keep containers across application attempts." 
+ - " If the flag is true, running containers will not be killed when" + - " application attempt fails and these containers will be retrieved by" + - " the new application attempt "); opts.addOption("attempt_failures_validity_interval", true, "when attempt_failures_validity_interval in milliseconds is set to > 0," + "the failure number will not take failures which happen out of " + @@ -275,10 +262,12 @@ public Client(Configuration conf) throws Exception { "If container could retry, it specifies max retires"); opts.addOption("container_retry_interval", true, "Interval between each retry, unit is milliseconds"); - opts.addOption(OPT_TF_CLIENT, true, + opts.addOption(TFApplication.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); - opts.addOption(OPT_TF_SERVER_JAR, true, + opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, "Provide server jar of tensorflow"); + opts.addOption(TFApplication.OPT_TF_SERVER_PY, true, + "Provide server pyscript of tensorflow"); } /** @@ -327,18 +316,14 @@ public boolean init(String[] args) throws ParseException { } - if (cliParser.hasOption("keep_containers_across_application_attempts")) { - LOG.info("keep_containers_across_application_attempts"); - keepContainers = true; - } - appName = cliParser.getOptionValue("appname", "tensorflow"); amPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0")); amQueue = cliParser.getOptionValue("queue", "default"); amMemory = Integer.parseInt(cliParser.getOptionValue("master_memory", "100")); amVCores = Integer.parseInt(cliParser.getOptionValue("master_vcores", "1")); - tfClientPy = cliParser.getOptionValue(OPT_TF_CLIENT, TFClient.DEFAULT_TF_CLIENT_PY); - tfConatinerJar = cliParser.getOptionValue(OPT_TF_SERVER_JAR); + tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT, TFClient.TF_CLIENT_PY); + tfConatinerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, TFAmContainer.APPMASTER_JAR_PATH); + tfServerPy = 
cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_PY, TFContainer.SERVER_PY_PATH); if (amMemory < 0) { throw new IllegalArgumentException("Invalid memory specified for application master, exiting." @@ -357,11 +342,11 @@ public boolean init(String[] args) throws ParseException { - if (!cliParser.hasOption(OPT_TF_CLIENT)) { + if (!cliParser.hasOption(TFApplication.OPT_TF_CLIENT)) { throw new IllegalArgumentException( "No tensorflow client specified to be executed by application master"); } else { - tfClientPy = cliParser.getOptionValue(OPT_TF_CLIENT); + tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT); } containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "256")); @@ -514,7 +499,6 @@ public boolean run() throws IOException, YarnException { ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext(); ApplicationId appId = appContext.getApplicationId(); - appContext.setKeepContainersAcrossApplicationAttempts(keepContainers); appContext.setApplicationName(appName); if (attemptFailuresValidityInterval >= 0) { @@ -545,7 +529,12 @@ public boolean run() throws IOException, YarnException { // Copy the application master jar to the filesystem // Create a local resource to point to the destination jar path FileSystem fs = FileSystem.get(conf); - tfAmContainer.addToLocalResources(fs, appMasterJar, appMasterJarPath, appId.toString(), + tfAmContainer.addToLocalResources(fs, appMasterJar, TFAmContainer.APPMASTER_JAR_PATH, appId.toString(), + localResources, null); + + tfAmContainer.addToLocalResources(fs, tfServerPy, TFContainer.SERVER_PY_PATH, appId.toString(), + localResources, null); + tfAmContainer.addToLocalResources(fs, tfClientPy, TFClient.TF_CLIENT_PY, appId.toString(), localResources, null); // Set the log4j properties if needed @@ -632,6 +621,10 @@ public boolean run() throws IOException, YarnException { yarnClient.submitApplication(appContext); + // run tensorflow client python script + TFClient 
tfClient = new TFClient(tfClientPy); + tfClient.startTensorflowClient(); + // TODO // Try submitting the same request again // app submission failure? diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java index 4a0b75429c98e..24081eeee5476 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java @@ -1,5 +1,221 @@ package org.apache.hadoop.yarn.applications.tensorflow; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.commons.codec.binary.Base64; + +import java.io.IOException; +import java.util.*; + public class ClusterSpec { + private static final Log LOG = LogFactory.getLog(ClusterSpec.class); + private Map workers = null; + private Map paramServers = null; + private TFWorkerAddress tfMasterNode = null; + private int serverPortNext = PORT_FLOOR; + + private static final int PORT_FLOOR = 20000; + private static final int PORT_CEILING = 25000; + + public static final String WORKER = "worker"; + public static final String PS = "ps"; + + public static ClusterSpec makeClusterSpec() { + return new ClusterSpec(); + } + + private ClusterSpec() { + workers = new HashMap<>(); + paramServers = new HashMap<>(); + serverPortNext = PORT_FLOOR + ((int)(Math.random() * (PORT_CEILING - PORT_FLOOR)) + 1); + } + + private 
int nextRandomPort() { + int port = serverPortNext; + serverPortNext = serverPortNext + 2; + return port; + } + + private int maxTaskIndexOfWorkerInSameNode(String hostName) { + int baseIndex = 0; + for (TFWorkerAddress sv : workers.values()) { + if (sv.getAddress() == hostName && sv.getTaskIndex() > baseIndex) { + baseIndex = sv.getTaskIndex(); + } + } + + return baseIndex; + } + + public void addWorkerSpec(String containerId, String hostName) { + + TFWorkerAddress server = new TFWorkerAddress(this, hostName, nextRandomPort(), this.workers.size()); + if (tfMasterNode == null) { + tfMasterNode = server; + } + this.workers.put(containerId, server); + } + + private int maxTaskIndexOfPsInSameNode(String hostName) { + int baseIndex = 0; + for (TFParamServerAddress sv : paramServers.values()) { + if (sv.getAddress() == hostName && sv.getTaskIndex() > baseIndex) { + baseIndex = sv.getTaskIndex(); + } + } + + return baseIndex; + } + + public void addPsSpec(String containerId, String hostName) { + TFParamServerAddress server = new TFParamServerAddress(this, hostName, nextRandomPort(), this.paramServers.size()); + this.paramServers.put(containerId, server); + } + + public String getMasterNodeAddress() { + if (tfMasterNode == null) { + return null; + } + return tfMasterNode.getAddress(); + } + + public int getMasterNodePort() { + if (tfMasterNode == null) { + return 0; + } + return tfMasterNode.getPort(); + } + + public boolean isWorker(String containerId) { + return this.workers.containsKey(containerId); + } + + public boolean isPs(String containerId) { + return this.paramServers.containsKey(containerId); + } + + public TFServerAddress getServerAddress(String containerId) { + TFServerAddress server = this.workers.get(containerId); + if (server == null) { + LOG.info(containerId + " is not a worker" ); + server = this.paramServers.get(containerId); + } + + return server; + } + + @Override + public String toString() { + String worker_array = ""; + for (TFWorkerAddress wk : 
workers.values()) { + worker_array += wk.getAddress() + ":" + wk.getPort() + ","; + } + String ps_array = ""; + for (TFParamServerAddress ps : paramServers.values()) { + ps_array += ps.getAddress() + ":" + ps.getPort() + ","; + } + + String cp = ""; + if (!worker_array.equals("")) { + cp += "worker : [" + worker_array + "],"; + } + + if (!ps_array.equals("")) { + cp += "ps : [" + ps_array + "]"; + } + return cp; + } + + + public String getJsonString() throws JsonProcessingException { + Map> cluster = new HashMap<>(); + + if (!this.workers.isEmpty()) { + List servers = new ArrayList(); + for (TFWorkerAddress s : this.workers.values()) { + String addr = "" + s.getAddress() + ":" + s.getPort(); + servers.add(addr); + } + cluster.put(WORKER, servers); + } + + if (!this.paramServers.isEmpty()) { + List servers = new ArrayList(); + for (TFParamServerAddress s : this.paramServers.values()) { + String addr = "" + s.getAddress() + ":" + s.getPort(); + servers.add(addr); + } + cluster.put(PS, servers); + } + ObjectMapper objectMapper = new ObjectMapper(); + String json = null; + json = objectMapper.writeValueAsString(cluster); + return json; + } + + public String getBase64EncodedJsonString() throws JsonProcessingException { + byte[] data = getJsonString().getBytes(); + Base64 encoder = new Base64(0, null, true); + return encoder.encodeToString(data); + } + + public static String decodeJsonString(String base64String) { + Base64 decoder = new Base64(0, null, true); + byte[] data = decoder.decode(base64String); + return new String(data); + } + + + public static Map> toClusterMap(String clusterString) throws IOException { + ObjectMapper objectMapper = new ObjectMapper(); + Map> cluster = null; + cluster = objectMapper.readValue(clusterString, Map.class); + + return cluster; + } + + public void testClusterString() { + LOG.info("clusterspec: " + this.toString()); + try { + LOG.info("clusterspec JsonString: " + this.getJsonString()); + } catch (JsonProcessingException e) { + 
e.printStackTrace(); + } + try { + LOG.info("clusterspec encodeJsonString: " + this.getBase64EncodedJsonString()); + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + String base64DecodedString = null; + try { + base64DecodedString = ClusterSpec.decodeJsonString(this.getBase64EncodedJsonString()); + LOG.info("clusterspec decodeJsonString: " + base64DecodedString); + if (base64DecodedString.equals(this.getJsonString())) { + LOG.info("raw and decode is equal!"); + } + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + + + try { + Map> cs = ClusterSpec.toClusterMap(base64DecodedString); + if (cs.containsKey(WORKER)) { + for (String s : cs.get(WORKER)) { + LOG.info(s); + } + } + + if (cs.containsKey(PS)) { + for (String s : cs.get(PS)) { + LOG.info(s); + } + } + } catch (IOException e) { + e.printStackTrace(); + } + } } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java new file mode 100644 index 0000000000000..1b7f48983e47a --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -0,0 +1,165 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.yarn.api.records.*; + +import java.io.IOException; +import java.util.*; + +/** + * Created by muzhongz on 16-12-20. 
+ */ +public class LaunchContainerThread implements Runnable { + + private static final Log LOG = LogFactory.getLog(LaunchContainerThread.class); + + // Allocated container + private Container container; + private String shellId; + private String tfServerJar; + // Memory to request for the container on which the shell command will run + private long containerMemory = 10; + + // Container retry options + private ContainerRetryPolicy containerRetryPolicy = + ContainerRetryPolicy.NEVER_RETRY; + private Set containerRetryErrorCodes = null; + private int containerMaxRetries = 0; + private int containrRetryInterval = 0; + + private ApplicationMaster appMaster; + + private ApplicationMaster.NMCallbackHandler containerListener; + private TFServerAddress serverAddress = null; + + private LaunchContainerThread(Container container, String shellId, String tfServerJar, long containerMemory, + ContainerRetryPolicy containerRetryPolicy, Set containerRetryErrorCodes, + int containerMaxRetries, int containrRetryInterval, ApplicationMaster appMaster, + ApplicationMaster.NMCallbackHandler containerListener) { + this.container = container; + this.shellId = shellId; + this.tfServerJar = tfServerJar; + this.containerMemory = containerMemory; + this.containerRetryPolicy = containerRetryPolicy; + this.containerRetryErrorCodes = containerRetryErrorCodes; + this.containerMaxRetries = containerMaxRetries; + this.containrRetryInterval = containrRetryInterval; + this.appMaster = appMaster; + this.containerListener = containerListener; + } + + public LaunchContainerThread(Container container, String shellId, String tfServerJar, long containerMemory, + ContainerRetryPolicy containerRetryPolicy, Set containerRetryErrorCodes, + int containerMaxRetries, int containrRetryInterval, ApplicationMaster appMaster, + ApplicationMaster.NMCallbackHandler containerListener, TFServerAddress serverAddress) { + this(container, + shellId, + tfServerJar, + containerMemory, + containerRetryPolicy, + 
containerRetryErrorCodes, + containerMaxRetries, + containrRetryInterval, + appMaster, + containerListener); + this.serverAddress = serverAddress; + if (this.serverAddress == null) { + LOG.info("server address is null"); + } + } + + @Override + /** + * Connects to CM, sets up container launch context + * for shell command and eventually dispatches the container + * start request to the CM. + */ + public void run() { + LOG.info("Setting up container launch container for containerid=" + + container.getId() + " with shellid=" + shellId); + + // Set the env variables to be setup in the env where the tf servers will be run + LOG.info("Set the environment for the tf servers"); + + FileSystem fs = null; + try { + fs = FileSystem.get(appMaster.getConfiguration()); + } catch (IOException e) { + e.printStackTrace(); + } + + TFContainer tfContainer = new TFContainer(appMaster); + + // Set the java environment + Map env = tfContainer.setJavaEnv(appMaster.getConfiguration(), null); + + // Set the local resources + Map localResources = new HashMap(); + + ApplicationId appId = appMaster.getAppAttempId().getApplicationId(); + String tfServerPy = TFContainer.SERVER_PY_PATH; + try { + tfContainer.addToLocalResources(fs, tfServerPy, TFContainer.SERVER_PY_PATH, appId.toString(), + localResources, null); + } catch (IOException e) { + e.printStackTrace(); + } + + try { + tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), + localResources, null); + } catch (IOException e) { + e.printStackTrace(); + } + + + LOG.info("clusterspec: " + this.serverAddress.getClusterSpec().toString()); + this.serverAddress.getClusterSpec().testClusterString(); + ClusterSpec cs = this.serverAddress.getClusterSpec(); + + StringBuilder command = null; + try { + command = tfContainer.makeCommands(containerMemory, + cs.getBase64EncodedJsonString(), + this.serverAddress.getJobName(), + this.serverAddress.getTaskIndex()); + } catch (JsonProcessingException e) { + 
LOG.info("cluster spec cannot convert into base64 json string!"); + e.printStackTrace(); + } + + List commands = new ArrayList(); + commands.add(command.toString()); + //commands.add("/home/muzhongz/tfonyarn/tf_server.sh"); + if (serverAddress != null) { + LOG.info(serverAddress.getJobName() + " : " + serverAddress.getAddress() + ":" + serverAddress.getPort()); + } + + // Set up ContainerLaunchContext, setting local resource, environment, + // command and token for constructor. + + // Note for tokens: Set up tokens for the container too. Today, for normal + // shell commands, the container in distribute-shell doesn't need any + // tokens. We are populating them mainly for NodeManagers to be able to + // download anyfiles in the distributed file-system. The tokens are + // otherwise also useful in cases, for e.g., when one is running a + // "hadoop dfs" command inside the distributed shell. + + ContainerRetryContext containerRetryContext = + ContainerRetryContext.newInstance( + containerRetryPolicy, containerRetryErrorCodes, + containerMaxRetries, containrRetryInterval); + for (String cmd : commands) { + LOG.info("command: " + cmd.toString()); + } + ContainerLaunchContext ctx = ContainerLaunchContext.newInstance( + localResources, env, commands, null, appMaster.getAllTokens().duplicate(), + null, containerRetryContext); + containerListener.addContainer(container.getId(), container); + appMaster.getNMClientAsync().startContainerAsync(container, ctx); + } + +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java index 3907f92135e91..0de90e79b9bef 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java @@ -26,7 +26,7 @@ */ public class TFAmContainer { private static final Log LOG = LogFactory.getLog(TFAmContainer.class); - + public static final String APPMASTER_JAR_PATH = "AppMaster.jar"; private Client client; public TFAmContainer(Client client) { this.client = client; @@ -51,6 +51,8 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, } else { fs.copyFromLocalFile(new Path(fileSrcPath), dst); } + + LOG.info("copy: " + fileSrcPath + " ===> " + dst.toString()); FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( @@ -107,8 +109,8 @@ public StringBuilder makeCommands(long amMemory, String appMasterMainClass, int vargs.add("--container_memory " + String.valueOf(containerMemory)); vargs.add("--container_vcores " + String.valueOf(containerVirtualCores)); vargs.add("--num_containers " + String.valueOf(numContainers)); - vargs.add("--" + Client.OPT_TF_CLIENT + " " + String.valueOf(tfClientPy)); - vargs.add("--" + Client. 
OPT_TF_SERVER_JAR + " " + String.valueOf(tfConatinerJar)); + vargs.add("--" + TFApplication.OPT_TF_CLIENT + " " + String.valueOf(tfClientPy)); + vargs.add("--" + TFApplication.OPT_TF_SERVER_JAR + " " + String.valueOf(tfConatinerJar)); if (debugFlag) { vargs.add("--debug"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java new file mode 100644 index 0000000000000..f1b4cd72050f3 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -0,0 +1,11 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +/** + * Created by muzhongz on 16-12-16. 
+ */ +public class TFApplication { + public static final String OPT_TF_CLIENT = "tf_client"; + public static final String OPT_TF_CLIENT_PY = "tf_clientpy"; + public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; + public static final String OPT_TF_SERVER_PY = "tf_serverpy"; +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java index 3168f484db989..3b26566dfba25 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -1,29 +1,80 @@ package org.apache.hadoop.yarn.applications.tensorflow; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStreamReader; /** * Created by muzhongz on 16-11-30. 
*/ public class TFClient implements Runnable { - public static final String DEFAULT_TF_CLIENT_PY = "tf_client.py"; - private String clientPy; - - public TFClient(String tfClientName) { - clientPy = tfClientName; - } + private static final Log LOG = LogFactory.getLog(TFClient.class); + public static final String TF_CLIENT_PY = "tf_client.py"; + private String tfClientPy; + private String tfMasterAddress; + private int tfMasterPort; - @Override - public void run() { + private void execCmd(String cmd) { Process process = null; try { - process = Runtime.getRuntime().exec("python " + clientPy); - process.waitFor(); + LOG.info("cmd is " + cmd); + process = Runtime.getRuntime().exec(cmd); } catch (IOException e) { + LOG.fatal("cmd running failed", e); e.printStackTrace(); + } + + try { + LOG.info("cmd log--->"); + BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); + String line; + while ((line = in.readLine()) != null) { + + LOG.info(line); + System.out.println(line); + } + in.close(); + LOG.info("<---cmd log end"); + process.waitFor(); } catch (InterruptedException e) { + LOG.fatal("waiting error ", e); + e.printStackTrace(); + } catch (IOException e) { + LOG.info("io exception"); e.printStackTrace(); } } + + public TFClient(String tfClientPy) { + LOG.info("tf client py script: " + tfClientPy); + this.tfClientPy = tfClientPy; + } + + @Override + public void run() { + execCmd("ls -l"); + if (tfMasterAddress == null || tfMasterPort == 0) { + LOG.fatal("invalid master address!"); + execCmd("python " + tfClientPy); + } else { + execCmd("python " + tfClientPy + " \"" + tfMasterAddress + "\"" + " " + tfMasterPort); + } + } + + public void startTensorflowClient() { + Thread thread = new Thread(this); + thread.start(); + } + + public void tensorflowServersReady(boolean ready, String masterAddress, int port) { + LOG.info("tf master : " + masterAddress + ":" + port); + + this.tfMasterAddress = masterAddress; + this.tfMasterPort = port; + 
startTensorflowClient(); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index c6d7f926af927..d909a78384617 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -28,6 +28,7 @@ public class TFContainer { private String appName = DSConstants.APP_NAME; private ApplicationMaster appMaster; public static final String SERVER_PY_PATH = "tf_server.py"; + public static final String SERVER_JAR_PATH = "TFServer.jar"; public TFContainer(ApplicationMaster am) { appMaster = am; @@ -41,7 +42,6 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, appName + "/" + appId + "/" + fileDstPath; Path dst = new Path(fs.getHomeDirectory(), suffix); - LOG.info( fileSrcPath + " ==> " + dst.toString()); if (fileSrcPath == null) { FSDataOutputStream ostream = null; try { @@ -54,6 +54,7 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, } else { fs.copyFromLocalFile(new Path(fileSrcPath), dst); } + LOG.info("copy: " + fileSrcPath + " ===> " + dst.toString()); FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( @@ -90,13 +91,16 @@ public Map setJavaEnv(Configuration conf, String tfServerJar) { return env; } - public StringBuilder makeCommands(long containerMemory) { + public StringBuilder makeCommands(long containerMemory, String clusterSpec, String jobName, int taskIndex) { // Set the necessary 
command to execute on the allocated container Vector vargs = new Vector(5); vargs.add(ApplicationConstants.Environment.JAVA_HOME.$$() + "/bin/java"); vargs.add("-Xmx" + containerMemory + "m"); String containerClassName = TFServer.class.getName(); vargs.add(containerClassName); + vargs.add("--" + TFServer.OPT_CS + " " + clusterSpec); + vargs.add("--" + TFServer.OPT_JN + " " + jobName); + vargs.add("--" + TFServer.OPT_TI + " " + taskIndex); vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/TFServer." + ApplicationConstants.STDOUT); vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/TFServer." + ApplicationConstants.STDERR); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java new file mode 100644 index 0000000000000..d979d451b352f --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java @@ -0,0 +1,11 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +/** + * Created by muzhongz on 16-12-19. 
+ */ +public class TFParamServerAddress extends TFServerAddress{ + + public TFParamServerAddress(ClusterSpec cluster, String address, int port, int taskIndex) { + super(cluster, address, port, "ps", taskIndex); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java index b96e354aa7f87..ec5b712b8fcdc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -1,6 +1,10 @@ package org.apache.hadoop.yarn.applications.tensorflow; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -8,6 +12,9 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.util.List; +import java.util.Map; + /** * Created by muzhongz on 16-11-29. 
@@ -15,14 +22,67 @@ public class TFServer implements Runnable { private static final Log LOG = LogFactory.getLog(TFServer.class); private String tfServerPy; - private YarnConfiguration conf; + public static final String OPT_CS = "cs"; + public static final String OPT_TI = "ti"; + public static final String OPT_JN = "jn"; + + + private String clusterSpecString = null; + private Map> cluster = null; + private String jobName = null; + private int taskIndex = -1; + + // Command line options + private Options opts; public static void main(String[] args) { LOG.info("start container"); - TFServer server = new TFServer("tf_server.py"); + TFServer server = new TFServer(); + try { + try { + if (!server.init(args)) { + LOG.info("init failed!"); + } + } catch (IOException e) { + e.printStackTrace(); + } + } catch (ParseException e) { + LOG.info("parse failed"); + e.printStackTrace(); + } server.startTFServer(); } + + public TFServer() { + opts = new Options(); + opts.addOption(OPT_CS, true, "tf server cluster spec"); + opts.addOption(OPT_JN, true, "tf job name"); + opts.addOption(OPT_TI, true, "tf task index"); + } + + public boolean init(String[] args) throws ParseException, IOException { + + CommandLine cliParser = new GnuParser().parse(opts, args); + + if (args.length == 0) { + throw new IllegalArgumentException("No args specified for tf server to initialize"); + } + + if (!cliParser.hasOption(OPT_CS) || !cliParser.hasOption(OPT_JN) || !cliParser.hasOption(OPT_TI)) { + LOG.error("invalid args for tf server!"); + return false; + } + + clusterSpecString = ClusterSpec.decodeJsonString(cliParser.getOptionValue(OPT_CS)); + jobName = cliParser.getOptionValue(OPT_JN); + taskIndex = Integer.parseInt(cliParser.getOptionValue(OPT_TI)); + LOG.info("cs: " + clusterSpecString + "; jn: " + jobName + "; ti: " + taskIndex); + cluster = ClusterSpec.toClusterMap(clusterSpecString); + return true; + } + + private void execCmd(String cmd) { Process process = null; try { @@ -58,33 +118,16 @@
private void execCmd(String cmd) { public void run() { execCmd("ls -l"); execCmd("pwd"); - execCmd("python --version"); - String command = "python " + tfServerPy; + String command = tfServerPy; execCmd(command); } - public TFServer(String serverPy) { - this.tfServerPy = serverPy; - conf = new YarnConfiguration(); - /* - for (String c : conf.getStrings( - YarnConfiguration.YARN_APPLICATION_CLASSPATH, - YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) { - LOG.info(c); - } - */ - } private String address; private int port; - private ClusterSpec clusterSpec; public void startTFServer() { - Thread thread = new Thread(this); - thread.start(); - } - - public int stochasticNetport() { - return 0; + org.tensorflow.bridge.TFServer server = new org.tensorflow.bridge.TFServer(cluster, jobName, taskIndex); + server.start(); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java new file mode 100644 index 0000000000000..9c140f9bd10e3 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java @@ -0,0 +1,60 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +/** + * Created by muzhongz on 16-12-21. 
+ */ +public class TFServerAddress { + private String address; + private int port; + private String jobName; /* worker or ps */ + private int taskIndex; + private ClusterSpec clusterSpec; + + protected TFServerAddress(ClusterSpec cluster, String address, int port, String jobName, int taskIndex) { + this.setClusterSpec(cluster); + this.setAddress(address); + this.setPort(port); + this.setJobName(jobName); + this.setTaskIndex(taskIndex); + } + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public int getTaskIndex() { + return taskIndex; + } + + public void setTaskIndex(int taskIndex) { + this.taskIndex = taskIndex; + } + + public ClusterSpec getClusterSpec() { + return clusterSpec; + } + + public void setClusterSpec(ClusterSpec clusterSpec) { + this.clusterSpec = clusterSpec; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java new file mode 100644 index 0000000000000..425017159915f --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java @@ -0,0 +1,12 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +/** + * Created by muzhongz on 16-12-19. 
+ */ +public class TFWorkerAddress extends TFServerAddress { + + public TFWorkerAddress(ClusterSpec cluster, String address, int port, int taskIndex) { + super(cluster, address, port, "worker", taskIndex); + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/tfappmaster_rpc_service.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/tfappmaster_rpc_service.proto new file mode 100644 index 0000000000000..cc33790383c0c --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/tfappmaster_rpc_service.proto @@ -0,0 +1,7 @@ +option java_package = "org.apache.hadoop.yarn.applications.tensorflow"; +option java_outer_classname = "TFAppMasterRpcService"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +package hadoop.yarn; + +import "test.proto"; \ No newline at end of file From ca01f0ddf94f87ca2e1f9caccc593cc131022d6e Mon Sep 17 00:00:00 2001 From: Zhong Muzhong Date: Mon, 26 Dec 2016 13:47:33 +0800 Subject: [PATCH 05/32] MAVEN: add tf project to the yarn application list --- .../hadoop-yarn/hadoop-yarn-applications/pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/pom.xml index 233a3532af152..149082566bb4a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/pom.xml @@ -22,6 +22,7 @@ 3.0.0-alpha2-SNAPSHOT 4.0.0 + org.apache.hadoop hadoop-yarn-applications 3.0.0-alpha2-SNAPSHOT Apache Hadoop YARN Applications @@ -35,6 +36,7 @@ hadoop-yarn-applications-distributedshell + hadoop-yarn-applications-tensorflow hadoop-yarn-applications-unmanaged-am-launcher From 4633fb721d579643c089afdabd2ac9ca452d395b Mon Sep 17
00:00:00 2001 From: muzhongz Date: Mon, 26 Dec 2016 14:31:59 +0800 Subject: [PATCH 06/32] TIMELINE: clean useless codes "timeline" --- .../tensorflow/ApplicationMaster.java | 305 +----------------- .../yarn/applications/tensorflow/Client.java | 63 ---- .../DistributedShellTimelinePlugin.java | 79 ----- .../applications/tensorflow/TFServer.java | 4 +- 4 files changed, 5 insertions(+), 446 deletions(-) delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 57138df1b923d..96a92afd96b87 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -140,7 +140,7 @@ public ApplicationAttemptId getAppAttempId() { // Tracking url to which app master publishes info for clients to monitor private String appMasterTrackingUrl = ""; - private boolean timelineServiceV2 = false; +/* private boolean timelineServiceV2 = false;*/ // App Master configuration // No. 
of containers to run shell command on @@ -207,11 +207,11 @@ public ByteBuffer getAllTokens() { private List launchThreads = new ArrayList(); // Timeline Client - @VisibleForTesting +/* @VisibleForTesting TimelineClient timelineClient; static final String CONTAINER_ENTITY_GROUP_ID = "CONTAINERS"; static final String APPID_TIMELINE_FILTER_NAME = "appId"; - static final String USER_TIMELINE_FILTER_NAME = "user"; + static final String USER_TIMELINE_FILTER_NAME = "user";*/ private final String linux_bash_command = "bash"; private final String windows_command = "cmd /c"; @@ -441,13 +441,6 @@ public boolean init(String[] args) throws ParseException, IOException { tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR); } - if (YarnConfiguration.timelineServiceEnabled(conf)) { - timelineServiceV2 = YarnConfiguration.timelineServiceV2Enabled(conf); - } else { - timelineClient = null; - LOG.warn("Timeline service is not enabled"); - } - return true; } @@ -506,21 +499,6 @@ public void run() throws YarnException, IOException, InterruptedException { nmClientAsync.init(conf); nmClientAsync.start(); - startTimelineClient(conf); - if (timelineServiceV2) { - // need to bind timelineClient - amRMClient.registerTimelineClient(timelineClient); - } - if(timelineClient != null) { - if (timelineServiceV2) { - publishApplicationAttemptEventOnTimelineServiceV2( - DSEvent.DS_APP_ATTEMPT_START); - } else { - publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), - DSEvent.DS_APP_ATTEMPT_START, domainId, appSubmitterUgi); - } - } - // Setup local RPC Server to accept status requests directly from clients // TODO need to setup a protocol for client to be able to communicate to // the RPC server @@ -596,37 +574,6 @@ public void run() { thread.start(); } - @VisibleForTesting - void startTimelineClient(final Configuration conf) - throws YarnException, IOException, InterruptedException { - try { - appSubmitterUgi.doAs(new PrivilegedExceptionAction() { - @Override - 
public Void run() throws Exception { - if (YarnConfiguration.timelineServiceEnabled(conf)) { - // Creating the Timeline Client - if (timelineServiceV2) { - timelineClient = TimelineClient.createTimelineClient( - appAttemptID.getApplicationId()); - LOG.info("Timeline service V2 client is enabled"); - } else { - timelineClient = TimelineClient.createTimelineClient(); - LOG.info("Timeline service V1 client is enabled"); - } - timelineClient.init(conf); - timelineClient.start(); - } else { - timelineClient = null; - LOG.warn("Timeline service is not enabled"); - } - return null; - } - }); - } catch (UndeclaredThrowableException e) { - throw new YarnException(e.getCause()); - } - } - @VisibleForTesting NMCallbackHandler createNMCallbackHandler() { return new NMCallbackHandler(this); @@ -642,16 +589,6 @@ protected boolean finish() { } catch (InterruptedException ex) {} } - if (timelineClient != null) { - if (timelineServiceV2) { - publishApplicationAttemptEventOnTimelineServiceV2( - DSEvent.DS_APP_ATTEMPT_END); - } else { - publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), - DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi); - } - } - // Join all launched threads // needed for when we time out // and we need to release containers @@ -698,11 +635,6 @@ protected boolean finish() { amRMClient.stop(); - // Stop Timeline Client - if(timelineClient != null) { - timelineClient.stop(); - } - return success; } @@ -826,14 +758,6 @@ public void onContainersCompleted(List completedContainers) { LOG.info("Container completed successfully." 
+ ", containerId=" + containerStatus.getContainerId()); } - if(timelineClient != null) { - if (timelineServiceV2) { - publishContainerEndEventOnTimelineServiceV2(containerStatus); - } else { - publishContainerEndEvent( - timelineClient, containerStatus, domainId, appSubmitterUgi); - } - } } // ask for more containers if any failed @@ -938,16 +862,6 @@ public void onContainerStarted(ContainerId containerId, applicationMaster.nmClientAsync.getContainerStatusAsync( containerId, container.getNodeId()); } - if(applicationMaster.timelineClient != null) { - if (applicationMaster.timelineServiceV2) { - applicationMaster.publishContainerStartEventOnTimelineServiceV2( - container); - } else { - applicationMaster.publishContainerStartEvent( - applicationMaster.timelineClient, container, - applicationMaster.domainId, applicationMaster.appSubmitterUgi); - } - } } @Override @@ -1019,115 +933,6 @@ private String readContent(String filePath) throws IOException { } } - private void publishContainerStartEvent( - final TimelineClient timelineClient, final Container container, - String domainId, UserGroupInformation ugi) { - final TimelineEntity entity = new TimelineEntity(); - entity.setEntityId(container.getId().toString()); - entity.setEntityType(DSEntity.DS_CONTAINER.toString()); - entity.setDomainId(domainId); - entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); - entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, container.getId() - .getApplicationAttemptId().getApplicationId().toString()); - TimelineEvent event = new TimelineEvent(); - event.setTimestamp(System.currentTimeMillis()); - event.setEventType(DSEvent.DS_CONTAINER_START.toString()); - event.addEventInfo("Node", container.getNodeId().toString()); - event.addEventInfo("Resources", container.getResource().toString()); - entity.addEvent(event); - - try { - processTimelineResponseErrors( - putContainerEntity(timelineClient, - container.getId().getApplicationAttemptId(), - entity)); - } catch 
(YarnException | IOException | ClientHandlerException e) { - LOG.error("Container start event could not be published for " - + container.getId().toString(), e); - } - } - - @VisibleForTesting - void publishContainerEndEvent( - final TimelineClient timelineClient, ContainerStatus container, - String domainId, UserGroupInformation ugi) { - final TimelineEntity entity = new TimelineEntity(); - entity.setEntityId(container.getContainerId().toString()); - entity.setEntityType(DSEntity.DS_CONTAINER.toString()); - entity.setDomainId(domainId); - entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); - entity.addPrimaryFilter(APPID_TIMELINE_FILTER_NAME, - container.getContainerId().getApplicationAttemptId() - .getApplicationId().toString()); - TimelineEvent event = new TimelineEvent(); - event.setTimestamp(System.currentTimeMillis()); - event.setEventType(DSEvent.DS_CONTAINER_END.toString()); - event.addEventInfo("State", container.getState().name()); - event.addEventInfo("Exit Status", container.getExitStatus()); - entity.addEvent(event); - try { - processTimelineResponseErrors( - putContainerEntity(timelineClient, - container.getContainerId().getApplicationAttemptId(), - entity)); - } catch (YarnException | IOException | ClientHandlerException e) { - LOG.error("Container end event could not be published for " - + container.getContainerId().toString(), e); - } - } - - private TimelinePutResponse putContainerEntity( - TimelineClient timelineClient, ApplicationAttemptId currAttemptId, - TimelineEntity entity) - throws YarnException, IOException { - if (TimelineUtils.timelineServiceV1_5Enabled(conf)) { - TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( - currAttemptId.getApplicationId(), - CONTAINER_ENTITY_GROUP_ID); - return timelineClient.putEntities(currAttemptId, groupId, entity); - } else { - return timelineClient.putEntities(entity); - } - } - - private void publishApplicationAttemptEvent( - final TimelineClient timelineClient, 
String appAttemptId, - DSEvent appEvent, String domainId, UserGroupInformation ugi) { - final TimelineEntity entity = new TimelineEntity(); - entity.setEntityId(appAttemptId); - entity.setEntityType(DSEntity.DS_APP_ATTEMPT.toString()); - entity.setDomainId(domainId); - entity.addPrimaryFilter(USER_TIMELINE_FILTER_NAME, ugi.getShortUserName()); - TimelineEvent event = new TimelineEvent(); - event.setEventType(appEvent.toString()); - event.setTimestamp(System.currentTimeMillis()); - entity.addEvent(event); - try { - TimelinePutResponse response = timelineClient.putEntities(entity); - processTimelineResponseErrors(response); - } catch (YarnException | IOException | ClientHandlerException e) { - LOG.error("App Attempt " - + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") - + " event could not be published for " - + appAttemptID, e); - } - } - - private TimelinePutResponse processTimelineResponseErrors( - TimelinePutResponse response) { - List errors = response.getErrors(); - if (errors.size() == 0) { - LOG.debug("Timeline entities are successfully put"); - } else { - for (TimelinePutResponse.TimelinePutError error : errors) { - LOG.error( - "Error when publishing entity [" + error.getEntityType() + "," - + error.getEntityId() + "], server side error code: " - + error.getErrorCode()); - } - } - return response; - } RMCallbackHandler getRMCallbackHandler() { return new RMCallbackHandler(); @@ -1148,108 +953,4 @@ boolean getDone() { return done; } - private void publishContainerStartEventOnTimelineServiceV2( - Container container) { - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity - entity = - new org.apache.hadoop.yarn.api.records.timelineservice. 
- TimelineEntity(); - entity.setId(container.getId().toString()); - entity.setType(DSEntity.DS_CONTAINER.toString()); - long ts = System.currentTimeMillis(); - entity.setCreatedTime(ts); - entity.addInfo("user", appSubmitterUgi.getShortUserName()); - - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); - event.setTimestamp(ts); - event.setId(DSEvent.DS_CONTAINER_START.toString()); - event.addInfo("Node", container.getNodeId().toString()); - event.addInfo("Resources", container.getResource().toString()); - entity.addEvent(event); - - try { - appSubmitterUgi.doAs(new PrivilegedExceptionAction() { - @Override - public TimelinePutResponse run() throws Exception { - timelineClient.putEntities(entity); - return null; - } - }); - } catch (Exception e) { - LOG.error("Container start event could not be published for " - + container.getId().toString(), - e instanceof UndeclaredThrowableException ? e.getCause() : e); - } - } - - private void publishContainerEndEventOnTimelineServiceV2( - final ContainerStatus container) { - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity - entity = - new org.apache.hadoop.yarn.api.records.timelineservice. 
- TimelineEntity(); - entity.setId(container.getContainerId().toString()); - entity.setType(DSEntity.DS_CONTAINER.toString()); - //entity.setDomainId(domainId); - entity.addInfo("user", appSubmitterUgi.getShortUserName()); - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); - event.setTimestamp(System.currentTimeMillis()); - event.setId(DSEvent.DS_CONTAINER_END.toString()); - event.addInfo("State", container.getState().name()); - event.addInfo("Exit Status", container.getExitStatus()); - entity.addEvent(event); - - try { - appSubmitterUgi.doAs(new PrivilegedExceptionAction() { - @Override - public TimelinePutResponse run() throws Exception { - timelineClient.putEntities(entity); - return null; - } - }); - } catch (Exception e) { - LOG.error("Container end event could not be published for " - + container.getContainerId().toString(), - e instanceof UndeclaredThrowableException ? e.getCause() : e); - } - } - - private void publishApplicationAttemptEventOnTimelineServiceV2( - DSEvent appEvent) { - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity - entity = - new org.apache.hadoop.yarn.api.records.timelineservice. 
- TimelineEntity(); - entity.setId(appAttemptID.toString()); - entity.setType(DSEntity.DS_APP_ATTEMPT.toString()); - long ts = System.currentTimeMillis(); - if (appEvent == DSEvent.DS_APP_ATTEMPT_START) { - entity.setCreatedTime(ts); - } - entity.addInfo("user", appSubmitterUgi.getShortUserName()); - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent event = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent(); - event.setId(appEvent.toString()); - event.setTimestamp(ts); - entity.addEvent(event); - - try { - appSubmitterUgi.doAs(new PrivilegedExceptionAction() { - @Override - public TimelinePutResponse run() throws Exception { - timelineClient.putEntitiesAsync(entity); - return null; - } - }); - } catch (Exception e) { - LOG.error("App Attempt " - + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end") - + " event could not be published for " - + appAttemptID, - e instanceof UndeclaredThrowableException ? e.getCause() : e); - } - } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index b68dc36eea872..081c3892455f7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -33,8 +33,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.records.*; -import 
org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; -import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.YarnClientApplication; import org.apache.hadoop.yarn.client.util.YarnClientUtils; @@ -135,18 +133,9 @@ public String getAppName() { // Debug flag boolean debugFlag = false; - // Timeline domain ID - private String domainId = null; - // Flag to indicate whether to create the domain of the given ID private boolean toCreateDomain = false; - // Timeline domain reader access control - private String viewACLs = null; - - // Timeline domain writer access control - private String modifyACLs = null; - private String flowName = null; private String flowVersion = null; private long flowRunId = 0L; @@ -229,12 +218,6 @@ public Client(Configuration conf) throws Exception { "If failure count reaches to maxAppAttempts, " + "the application will be failed."); opts.addOption("debug", false, "Dump out debug information"); - opts.addOption("domain", true, "ID of the timeline domain where the " - + "timeline entities will be put"); - opts.addOption("view_acls", true, "Users and groups that allowed to " - + "view the timeline entities in the given domain"); - opts.addOption("modify_acls", true, "Users and groups that allowed to " - + "modify the timeline entities in the given domain"); opts.addOption("create", false, "Flag to indicate whether to create the " + "domain specified with -domain."); opts.addOption("flow_name", true, "Flow name which the distributed shell " @@ -372,18 +355,6 @@ public boolean init(String[] args) throws ParseException { log4jPropFile = cliParser.getOptionValue("log_properties", ""); - // Get timeline domain options - if (cliParser.hasOption("domain")) { - domainId = cliParser.getOptionValue("domain"); - toCreateDomain = cliParser.hasOption("create"); - if (cliParser.hasOption("view_acls")) { - viewACLs = cliParser.getOptionValue("view_acls"); - } - if 
(cliParser.hasOption("modify_acls")) { - modifyACLs = cliParser.getOptionValue("modify_acls"); - } - } - // Get container retry options if (cliParser.hasOption("container_retry_policy")) { containerRetryOptions.add("--container_retry_policy " @@ -462,10 +433,6 @@ public boolean run() throws IOException, YarnException { } } - if (domainId != null && domainId.length() > 0 && toCreateDomain) { - prepareTimelineDomain(); - } - // Get a new application id YarnClientApplication app = yarnClient.createApplication(); GetNewApplicationResponse appResponse = app.getNewApplicationResponse(); @@ -718,34 +685,4 @@ private void forceKillApplication(ApplicationId appId) yarnClient.killApplication(appId); } - private void prepareTimelineDomain() { - TimelineClient timelineClient = null; - if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, - YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) { - timelineClient = TimelineClient.createTimelineClient(); - timelineClient.init(conf); - timelineClient.start(); - } else { - LOG.warn("Cannot put the domain " + domainId + - " because the timeline service is not enabled"); - return; - } - try { - //TODO: we need to check and combine the existing timeline domain ACLs, - //but let's do it once we have client java library to query domains. - TimelineDomain domain = new TimelineDomain(); - domain.setId(domainId); - domain.setReaders( - viewACLs != null && viewACLs.length() > 0 ? viewACLs : " "); - domain.setWriters( - modifyACLs != null && modifyACLs.length() > 0 ? 
modifyACLs : " "); - timelineClient.putDomain(domain); - LOG.info("Put the timeline domain: " + - TimelineUtils.dumpTimelineRecordtoJSON(domain)); - } catch (Exception e) { - LOG.error("Error when putting the timeline domain", e); - } finally { - timelineClient.stop(); - } - } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java deleted file mode 100644 index 28cbdb344c707..0000000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DistributedShellTimelinePlugin.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
package org.apache.hadoop.yarn.applications.tensorflow;

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
import org.apache.hadoop.yarn.server.timeline.NameValuePair;
import org.apache.hadoop.yarn.server.timeline.TimelineEntityGroupPlugin;
import org.apache.hadoop.yarn.util.ConverterUtils;

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;

/**
 * Timeline v1.5 reader plugin for YARN distributed shell. It translates an
 * incoming getEntity request to a set of related timeline entity groups, via
 * the information provided in the primary filter or entity id field.
 */
public class DistributedShellTimelinePlugin extends TimelineEntityGroupPlugin {

  /**
   * Maps a primary-filter query for DS_CONTAINER entities to the entity
   * group of the application the filter value (an application id) names.
   * Returns null for other entity types or when no primary filter is given.
   */
  @Override
  public Set<TimelineEntityGroupId> getTimelineEntityGroupId(String entityType,
      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters) {
    if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityType)) {
      if (primaryFilter == null) {
        return null;
      }
      return toEntityGroupId(primaryFilter.getValue().toString());
    }
    return null;
  }

  /**
   * Maps a single-entity query to its application's entity group.
   *
   * BUG FIX: the original compared the DS_CONTAINER constant against
   * {@code entityId} instead of {@code entityType}. Since a real entity id is
   * a container-id string (never the literal "DS_CONTAINER"), the branch was
   * unreachable — and had it ever matched, ContainerId.fromString(entityId)
   * would have thrown. The type check must be on entityType.
   */
  @Override
  public Set<TimelineEntityGroupId> getTimelineEntityGroupId(String entityId,
      String entityType) {
    if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityType)) {
      ContainerId containerId = ContainerId.fromString(entityId);
      ApplicationId appId = containerId.getApplicationAttemptId()
          .getApplicationId();
      return toEntityGroupId(appId.toString());
    }
    return null;
  }

  /**
   * Not used by TimelineEntityGroupPlugin at present; kept to satisfy the
   * abstract contract.
   */
  @Override
  public Set<TimelineEntityGroupId> getTimelineEntityGroupId(String entityType,
      SortedSet<String> entityIds, Set<String> eventTypes) {
    // Right now this method is not used by TimelineEntityGroupPlugin
    return null;
  }

  /** Builds the singleton group id {appId, CONTAINER_ENTITY_GROUP_ID}. */
  private Set<TimelineEntityGroupId> toEntityGroupId(String strAppId) {
    ApplicationId appId = ApplicationId.fromString(strAppId);
    TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance(
        appId, ApplicationMaster.CONTAINER_ENTITY_GROUP_ID);
    Set<TimelineEntityGroupId> result = new HashSet<>();
    result.add(groupId);
    return result;
  }
}
mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto diff --git a/hadoop-yarn-project/hadoop-yarn/pom.xml b/hadoop-yarn-project/hadoop-yarn/pom.xml index c92929210c087..eb93715450a2a 100644 --- a/hadoop-yarn-project/hadoop-yarn/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/pom.xml @@ -237,5 +237,6 @@ hadoop-yarn-client hadoop-yarn-registry hadoop-yarn-ui + hadoop-yarn-tfrpc diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml new file mode 100644 index 0000000000000..3e9cc56017f36 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml @@ -0,0 +1,75 @@ + + + + + hadoop-yarn + org.apache.hadoop + 3.0.0-alpha2-SNAPSHOT + + 4.0.0 + hadoop-yarn-tfrpc + 3.0.0-alpha2-SNAPSHOT + Apache Hadoop YARN Common + + + + ${project.parent.basedir} + true + ../dev-support + + + + + org.apache.hadoop + hadoop-yarn-common + + + + + + + org.apache.hadoop + hadoop-maven-plugins + + + compile-protoc + + protoc + + + ${protobuf.version} + ${protoc.path} + + ${basedir}/../../../hadoop-common-project/hadoop-common/src/main/proto + ${basedir}/../../../hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto + ${basedir}/src/main/proto + + + ${basedir}/src/main/proto + + TensorflowCluster.proto + yarn_tensorflow_cluster_protos.proto + + + + + + + + + diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowCluster.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowCluster.java new file mode 100644 index 0000000000000..06e4955ed8d96 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowCluster.java @@ -0,0 +1,4 @@ +package org.apache.hadoop.yarn.tensorflow.api; + +public interface TensorflowCluster { +} diff --git 
// ===== File: src/main/proto/TensorflowCluster.proto =====
option java_package = "org.apache.hadoop.yarn.tensorflow.proto";
option java_outer_classname = "TensorflowCluster";
option java_generic_services = true;
option java_generate_equal_and_hash = true;
// FIX: the request/response messages used by this service are declared in
// yarn_tensorflow_cluster_protos.proto, not yarn_server_common_service_protos.proto.
// Also, every proto import statement must end with a semicolon.
import "yarn_tensorflow_cluster_protos.proto";

// FIX: the service was named "TensorflowCluster", which collides with the
// java_outer_classname above and makes protoc reject the file. Following the
// YARN convention (e.g. ApplicationClientProtocol/ApplicationClientProtocolService),
// the service carries a "Service" suffix.
service TensorflowClusterService {
  rpc getClusterSpec(GetClusterSpecRequestProto) returns (GetClusterSpecResponseProto);
}

// ===== File: src/main/proto/yarn_tensorflow_cluster_protos.proto =====
// FIX: added the standard YARN java options (the file previously generated
// into the default package) and the missing semicolons on both imports.
option java_package = "org.apache.hadoop.yarn.proto";
option java_outer_classname = "YarnTensorflowClusterProtos";
option java_generic_services = true;
option java_generate_equal_and_hash = true;
import "yarn_protos.proto";
import "yarn_server_common_protos.proto";

// Empty placeholder request: getClusterSpec takes no parameters today, but a
// message is still required so fields can be added compatibly later.
message GetClusterSpecRequestProto {
}

message GetClusterSpecResponseProto {
  // Serialized TensorFlow ClusterSpec (job name -> host:port list).
  required string cluster_spec = 1;
}
+++++++++++ .../GetClusterSpecRequest.java | 28 ++++++ .../GetClusterSpecResponse.java | 24 +++++ .../impl/pb/GetClusterSpecRequestPBImpl.java | 64 +++++++++++++ .../impl/pb/GetClusterSpecResponsePBImpl.java | 90 +++++++++++++++++++ .../src/main/proto/TensorflowCluster.proto | 27 +++++- .../yarn_tensorflow_cluster_protos.proto | 25 +++++- 14 files changed, 560 insertions(+), 11 deletions(-) create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java create mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java diff --git a/hadoop-yarn-project/hadoop-yarn/pom.xml b/hadoop-yarn-project/hadoop-yarn/pom.xml index eb93715450a2a..4386adf58a8f4 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/pom.xml @@ -237,6 +237,6 @@ hadoop-yarn-client hadoop-yarn-registry hadoop-yarn-ui - hadoop-yarn-tfrpc + tensorflow-rpc diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml index 3e9cc56017f36..33c5ecbcc225e 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml @@ -22,9 +22,9 @@ 3.0.0-alpha2-SNAPSHOT 4.0.0 - hadoop-yarn-tfrpc + tensorflow-rpc 3.0.0-alpha2-SNAPSHOT - Apache Hadoop YARN Common + Apache Hadoop YARN TFRPC @@ -34,6 +34,12 @@ + + org.apache.hadoop + hadoop-common + provided + + org.apache.hadoop hadoop-yarn-common @@ -57,13 +63,14 @@ ${basedir}/../../../hadoop-common-project/hadoop-common/src/main/proto ${basedir}/../../../hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto + ${basedir}/../hadoop-yarn-api/src/main/proto ${basedir}/src/main/proto ${basedir}/src/main/proto - TensorflowCluster.proto yarn_tensorflow_cluster_protos.proto + TensorflowCluster.proto diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java new file mode 100644 index 0000000000000..93ac65bb3687e --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
package org.apache.hadoop.yarn.tensorflow.api;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.yarn.client.RMProxy;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest;
import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse;

import java.io.IOException;
import java.net.InetSocketAddress;

/**
 * Thin client for the {@link TensorflowCluster} RPC protocol. Wraps the RPC
 * proxy in the standard YARN retry proxy so transient connection failures are
 * retried according to the RM retry policy.
 *
 * NOTE(review): this reuses {@code RMProxy} to build a proxy for a non-RM
 * protocol — confirm that works for arbitrary server addresses, or switch to
 * {@code YarnRPC.getProxy} directly.
 */
public class TFClient {

  // Defaults used by main() when no command-line arguments are supplied.
  // They match the hard-coded endpoint of the original prototype.
  private static final String DEFAULT_HOST = "localhost";
  private static final int DEFAULT_PORT = 9001;

  private final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);
  private final TensorflowCluster tensorflow;

  /**
   * Creates a retrying client bound to the given server endpoint.
   *
   * @param conf configuration used for both the retry policy and the RPC engine
   * @param server address of the TensorflowCluster RPC server
   * @throws IOException if the proxy cannot be created
   */
  public TFClient(Configuration conf, InetSocketAddress server)
      throws IOException {
    RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf, false);
    TensorflowCluster proxy =
        RMProxy.createRMProxy(conf, TensorflowCluster.class, server);
    this.tensorflow = (TensorflowCluster) RetryProxy.create(
        TensorflowCluster.class, proxy, retryPolicy);
  }

  /**
   * Fetches the serialized TensorFlow cluster spec from the server.
   *
   * @return the cluster spec string reported by the AM
   * @throws IOException on transport failure
   * @throws YarnException on server-side failure
   */
  public String getClusterSpec() throws IOException, YarnException {
    GetClusterSpecResponse response = this.tensorflow.getClusterSpec(
        recordFactory.newRecordInstance(GetClusterSpecRequest.class));
    return response.getClusterSpec();
  }

  /**
   * Debug entry point. Generalized from the original hard-coded endpoint:
   * usage is {@code TFClient [host [port]]}; with no arguments it connects to
   * localhost:9001 exactly as before.
   */
  public static void main(String[] args) throws IOException, YarnException {
    String host = args.length > 0 ? args[0] : DEFAULT_HOST;
    int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;
    InetSocketAddress address = new InetSocketAddress(host, port);
    TFClient client = new TFClient(new Configuration(), address);
    System.out.println(client.getClusterSpec());
  }
}
+ */ package org.apache.hadoop.yarn.tensorflow.api; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; + +import java.io.IOException; + public interface TensorflowCluster { + public GetClusterSpecResponse getClusterSpec(GetClusterSpecRequest request) + throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java new file mode 100644 index 0000000000000..4c402ddfff68e --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java @@ -0,0 +1,77 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.tensorflow.api; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.ipc.Server; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.factories.RecordFactory; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.ipc.YarnRPC; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; + +import java.io.IOException; +import java.net.InetSocketAddress; + +public class TensorflowClusterImpl extends AbstractService implements TensorflowCluster { + private Server server; + private static final RecordFactory recordFactory = + RecordFactoryProvider.getRecordFactory(null); + + public TensorflowClusterImpl() { + super(TensorflowClusterImpl.class.getName()); + } + + @Override + protected void serviceStart() throws Exception { + super.serviceStart(); + Configuration conf = getConfig(); + YarnRPC rpc = YarnRPC.create(conf); + InetSocketAddress address = new InetSocketAddress("localhost", 9001); + this.server = rpc.getServer( + TensorflowCluster.class, this, address, conf, null, + conf.getInt(YarnConfiguration.RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT, + YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT)); + + this.server.start(); + } + + @Override + public GetClusterSpecResponse getClusterSpec(GetClusterSpecRequest request) throws YarnException, IOException { + GetClusterSpecResponse response = recordFactory.newRecordInstance(GetClusterSpecResponse.class); + response.setClusterSpec("this is a test"); + return response; + } + + @Override + protected void serviceStop() throws Exception { + if (this.server != null) { + this.server.stop(); + } + super.serviceStop(); + } + + public static void main(String[] args) { 
+ TensorflowClusterImpl server = new TensorflowClusterImpl(); + server.init(new Configuration()); + server.start(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java new file mode 100644 index 0000000000000..2c82b07c5fa46 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.tensorflow.api; + +import org.apache.hadoop.ipc.ProtocolInfo; +import org.apache.hadoop.yarn.proto.TensorflowCluster.TensorflowClusterService; + +@ProtocolInfo( + protocolName = "org.apache.hadoop.yarn.server.api.ResourceTrackerPB", + protocolVersion = 1) +public interface TensorflowClusterPB extends TensorflowClusterService.BlockingInterface { +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java new file mode 100644 index 0000000000000..ecd5597856268 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.tensorflow.api.impl.pb.client; + +import com.google.protobuf.ServiceException; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.RPC; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.ipc.RPCUtil; +import org.apache.hadoop.yarn.tensorflow.api.TensorflowCluster; +import org.apache.hadoop.yarn.tensorflow.api.TensorflowClusterPB; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecResponsePBImpl; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecRequestPBImpl; +import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecRequestProto; + +import java.io.Closeable; +import java.io.IOException; +import java.net.InetSocketAddress; + +public class TensorflowClusterPBClientImpl implements TensorflowCluster, Closeable { + private TensorflowClusterPB proxy; + + public TensorflowClusterPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { + RPC.setProtocolEngine(conf, TensorflowClusterPB.class, ProtobufRpcEngine.class); + proxy = (TensorflowClusterPB)RPC.getProxy( + TensorflowClusterPB.class, clientVersion, addr, conf); + } + + @Override + public void close() { + if(this.proxy != null) { + RPC.stopProxy(this.proxy); + } + } + + @Override + public GetClusterSpecResponse getClusterSpec(GetClusterSpecRequest request) throws YarnException, IOException { + GetClusterSpecRequestProto requestProto = ((GetClusterSpecRequestPBImpl)request).getProto(); + try { + return new GetClusterSpecResponsePBImpl(proxy.getClusterSpec(null, requestProto)); + } catch (ServiceException e) { + RPCUtil.unwrapAndThrowException(e); + return null; + } + } +} diff 
--git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java new file mode 100644 index 0000000000000..37dcdc0b65636 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java @@ -0,0 +1,50 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.tensorflow.api.impl.pb.service; + +import com.google.protobuf.RpcController; +import com.google.protobuf.ServiceException; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.tensorflow.api.TensorflowCluster; +import org.apache.hadoop.yarn.tensorflow.api.TensorflowClusterPB; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecResponsePBImpl; +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecRequestPBImpl; +import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecRequestProto; +import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecResponseProto; + +import java.io.IOException; + +public class TensorflowClusterPBServiceImpl implements TensorflowClusterPB { + private TensorflowCluster real; + + public TensorflowClusterPBServiceImpl(TensorflowCluster impl) { + this.real = impl; + } + + @Override + public GetClusterSpecResponseProto getClusterSpec(RpcController controller, GetClusterSpecRequestProto proto) throws ServiceException { + GetClusterSpecRequestPBImpl request = new GetClusterSpecRequestPBImpl(proto); + try { + GetClusterSpecResponse response = real.getClusterSpec(request); + return ((GetClusterSpecResponsePBImpl)response).getProto(); + } catch (YarnException | IOException e) { + throw new ServiceException(e); + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java new file mode 100644 index 0000000000000..9742117bfec56 --- /dev/null +++ 
b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.tensorflow.api.protocolrecords; + +import org.apache.hadoop.yarn.util.Records; + +public abstract class GetClusterSpecRequest { + + public static GetClusterSpecRequest newInstance() { + GetClusterSpecRequest request = Records.newRecord(GetClusterSpecRequest.class); + return request; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java new file mode 100644 index 0000000000000..ff618b885463b --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java @@ -0,0 +1,24 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.tensorflow.api.protocolrecords; + +public abstract class GetClusterSpecResponse { + public abstract String getClusterSpec(); + + public abstract void setClusterSpec(String clusterSpec); +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java new file mode 100644 index 0000000000000..bc7d59ed31397 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecRequestProto; + +public class GetClusterSpecRequestPBImpl extends GetClusterSpecRequest { + private GetClusterSpecRequestProto proto = + GetClusterSpecRequestProto.getDefaultInstance(); + private GetClusterSpecRequestProto.Builder builder = null; + private boolean viaProto = false; + + private boolean rebuild = false; + + public GetClusterSpecRequestPBImpl() { + builder = GetClusterSpecRequestProto.newBuilder(); + } + + public GetClusterSpecRequestPBImpl(GetClusterSpecRequestProto proto) { + this.proto = proto; + viaProto = true; + } + + private void mergeLocalToProto() { + if (viaProto) { + maybeInitBuilder(); + } + proto = builder.build(); + rebuild = false; + viaProto = true; + } + + public GetClusterSpecRequestProto getProto() { + if (rebuild) { + mergeLocalToProto(); + } + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = GetClusterSpecRequestProto.newBuilder(proto); + } + viaProto = false; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java new file mode 100644 index 0000000000000..aa7d1cf9c2ada --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java @@ -0,0 +1,90 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecResponseProto; +import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecResponseProtoOrBuilder; + +public class GetClusterSpecResponsePBImpl extends GetClusterSpecResponse { + GetClusterSpecResponseProto proto = GetClusterSpecResponseProto.getDefaultInstance(); + GetClusterSpecResponseProto.Builder builder = null; + private boolean viaProto = false; + + private String clusterSpec = null; + + public GetClusterSpecResponsePBImpl() { + builder = GetClusterSpecResponseProto.newBuilder(); + } + + public GetClusterSpecResponsePBImpl(GetClusterSpecResponseProto proto) { + this.proto = proto; + viaProto = true; + } + + public GetClusterSpecResponseProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + private void mergeLocalToProto() { + if (viaProto) { + maybeInitBuilder(); + } + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + private void mergeLocalToBuilder() { + if (this.clusterSpec != null) { + builder.setClusterSpec(this.clusterSpec); + } + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = GetClusterSpecResponseProto.newBuilder(proto); + } + viaProto = false; + } + + @Override + public String getClusterSpec() { + GetClusterSpecResponseProtoOrBuilder p = viaProto ? 
proto : builder; + if (this.clusterSpec != null) { + return this.clusterSpec; + } + if (!p.hasClusterSpec()) { + return null; + } + this.clusterSpec = p.getClusterSpec(); + return this.clusterSpec; + } + + @Override + public void setClusterSpec(String clusterSpec) { + maybeInitBuilder(); + if (clusterSpec == null) { + builder.clearClusterSpec(); + } + this.clusterSpec = clusterSpec; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/TensorflowCluster.proto b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/TensorflowCluster.proto index 318b00f347354..2348ff1cc4a11 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/TensorflowCluster.proto +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/TensorflowCluster.proto @@ -1,9 +1,28 @@ -option java_package = "org.apache.hadoop.yarn.tensorflow.proto"; +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +option java_package = "org.apache.hadoop.yarn.proto"; option java_outer_classname = "TensorflowCluster"; option java_generic_services = true; -option java_generate_equal_and_hash = true; -import "yarn_server_common_service_protos.proto" +option java_generate_equals_and_hash = true; +import "yarn_tensorflow_cluster_protos.proto"; -service TensorflowCluster { +service TensorflowClusterService { rpc getClusterSpec(GetClusterSpecRequestProto) returns (GetClusterSpecResponseProto); } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto index 2f287674e8212..96117de4e9492 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto @@ -1,8 +1,27 @@ -import "yarn_protos.proto" -import "yarn_server_common_protos.proto" +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import "yarn_protos.proto"; +option java_package = "org.apache.hadoop.yarn.proto"; +option java_outer_classname = "YarnTensorflowClusterProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; message GetClusterSpecRequestProto { - } message GetClusterSpecResponseProto { From 993dd03c0fc196c6feb532c9ad178fa2bde3e2c3 Mon Sep 17 00:00:00 2001 From: huafengw Date: Mon, 26 Dec 2016 17:58:33 +0800 Subject: [PATCH 09/32] move to the proper module --- .../pom.xml | 51 ++++++++---- .../tensorflow/api/TFClient.java | 6 +- .../tensorflow/api/TensorflowCluster.java | 6 +- .../tensorflow/api/TensorflowClusterImpl.java | 6 +- .../tensorflow/api/TensorflowClusterPB.java | 2 +- .../client/TensorflowClusterPBClientImpl.java | 14 ++-- .../TensorflowClusterPBServiceImpl.java | 12 +-- .../GetClusterSpecRequest.java | 2 +- .../GetClusterSpecResponse.java | 2 +- .../impl/pb/GetClusterSpecRequestPBImpl.java | 4 +- .../impl/pb/GetClusterSpecResponsePBImpl.java | 4 +- .../src/main/proto/TensorflowCluster.proto | 0 .../yarn_tensorflow_cluster_protos.proto | 0 hadoop-yarn-project/hadoop-yarn/pom.xml | 1 - .../hadoop-yarn/tensorflow-rpc/pom.xml | 82 ------------------- 15 files changed, 66 insertions(+), 126 deletions(-) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/TFClient.java (89%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/TensorflowCluster.java (80%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/TensorflowClusterImpl.java 
(91%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/TensorflowClusterPB.java (94%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java (77%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java (76%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/protocolrecords/GetClusterSpecRequest.java (93%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/protocolrecords/GetClusterSpecResponse.java (92%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java (91%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc/src/main/java/org/apache/hadoop/yarn => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications}/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java (93%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc => 
hadoop-yarn-applications/hadoop-yarn-applications-tensorflow}/src/main/proto/TensorflowCluster.proto (100%) rename hadoop-yarn-project/hadoop-yarn/{tensorflow-rpc => hadoop-yarn-applications/hadoop-yarn-applications-tensorflow}/src/main/proto/yarn_tensorflow_cluster_protos.proto (100%) delete mode 100644 hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml index 62f5e2c051edb..4212120bb01f0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml @@ -22,7 +22,6 @@ 3.0.0-alpha2-SNAPSHOT 4.0.0 - org.apache.hadoop hadoop-yarn-applications-tensorflow 3.0.0-alpha2-SNAPSHOT Apache Hadoop YARN tensorflow @@ -36,8 +35,6 @@ - - ${project.parent.parent.basedir} 1.9.13 @@ -56,15 +53,12 @@ test - - - org.tensorflow - java-bridge - 0.1.0 + org.tensorflow + java-bridge + 0.1.0 - org.codehaus.jackson jackson-mapper-asl @@ -215,11 +209,11 @@ - - - org.apache.hadoop.yarn.applications.tensorflow.Client - - + + + org.apache.hadoop.yarn.applications.tensorflow.Client + + @@ -231,6 +225,35 @@ + + org.apache.hadoop + hadoop-maven-plugins + + + compile-protoc + + protoc + + + ${protobuf.version} + ${protoc.path} + + ${basedir}/../../../../hadoop-common-project/hadoop-common/src/main/proto + ${basedir}/../../../../hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto + ${basedir}/../../hadoop-yarn-api/src/main/proto + ${basedir}/src/main/proto + + + ${basedir}/src/main/proto + + yarn_tensorflow_cluster_protos.proto + TensorflowCluster.proto + + + + + + diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java similarity index 89% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java index 93ac65bb3687e..0c62ce688832b 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java @@ -15,17 +15,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.yarn.tensorflow.api; +package org.apache.hadoop.yarn.applications.tensorflow.api; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryProxy; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; import org.apache.hadoop.yarn.client.RMProxy; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; import java.io.IOException; import java.net.InetSocketAddress; diff --git 
a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowCluster.java similarity index 80% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowCluster.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowCluster.java index 976e281a9948a..2fa254b5e76f8 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowCluster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowCluster.java @@ -15,11 +15,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api; +package org.apache.hadoop.yarn.applications.tensorflow.api; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; import java.io.IOException; diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java similarity index 91% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java index 4c402ddfff68e..1bd9d11f40ef5 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api; +package org.apache.hadoop.yarn.applications.tensorflow.api; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.Server; @@ -25,8 +25,8 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; import java.io.IOException; import java.net.InetSocketAddress; diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterPB.java similarity index 94% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterPB.java index 2c82b07c5fa46..1d12263c641c3 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/TensorflowClusterPB.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterPB.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api; +package org.apache.hadoop.yarn.applications.tensorflow.api; import org.apache.hadoop.ipc.ProtocolInfo; import org.apache.hadoop.yarn.proto.TensorflowCluster.TensorflowClusterService; diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java similarity index 77% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java index ecd5597856268..cf677a8f049bd 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/impl/pb/client/TensorflowClusterPBClientImpl.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api.impl.pb.client; +package org.apache.hadoop.yarn.applications.tensorflow.api.impl.pb.client; import com.google.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; @@ -23,13 +23,13 @@ import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; -import org.apache.hadoop.yarn.tensorflow.api.TensorflowCluster; -import org.apache.hadoop.yarn.tensorflow.api.TensorflowClusterPB; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecResponsePBImpl; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecRequestPBImpl; import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecRequestProto; +import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowCluster; +import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowClusterPB; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecRequestPBImpl; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecResponsePBImpl; import java.io.Closeable; import java.io.IOException; diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java similarity 
index 76% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java index 37dcdc0b65636..b271b6b9a1e98 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/impl/pb/service/TensorflowClusterPBServiceImpl.java @@ -15,18 +15,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.yarn.tensorflow.api.impl.pb.service; +package org.apache.hadoop.yarn.applications.tensorflow.api.impl.pb.service; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.tensorflow.api.TensorflowCluster; -import org.apache.hadoop.yarn.tensorflow.api.TensorflowClusterPB; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecResponsePBImpl; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecRequestPBImpl; import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecRequestProto; import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecResponseProto; +import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowCluster; +import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowClusterPB; +import 
org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecRequestPBImpl; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.impl.pb.GetClusterSpecResponsePBImpl; import java.io.IOException; diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/GetClusterSpecRequest.java similarity index 93% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/GetClusterSpecRequest.java index 9742117bfec56..7fa4111b11c1e 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecRequest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/GetClusterSpecRequest.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api.protocolrecords; +package org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords; import org.apache.hadoop.yarn.util.Records; diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/GetClusterSpecResponse.java similarity index 92% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/GetClusterSpecResponse.java index ff618b885463b..20482cab45977 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/GetClusterSpecResponse.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/GetClusterSpecResponse.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api.protocolrecords; +package org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords; public abstract class GetClusterSpecResponse { public abstract String getClusterSpec(); diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java similarity index 91% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java index bc7d59ed31397..2aeae28397c38 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecRequestPBImpl.java @@ -15,10 +15,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb; +package org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.impl.pb; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecRequest; import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecRequestProto; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; public class GetClusterSpecRequestPBImpl extends GetClusterSpecRequest { private GetClusterSpecRequestProto proto = diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java similarity index 93% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java index aa7d1cf9c2ada..ffb8b48dee6cb 100644 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/java/org/apache/hadoop/yarn/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/protocolrecords/impl/pb/GetClusterSpecResponsePBImpl.java @@ -15,11 +15,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.yarn.tensorflow.api.protocolrecords.impl.pb; +package org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.impl.pb; -import org.apache.hadoop.yarn.tensorflow.api.protocolrecords.GetClusterSpecResponse; import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecResponseProto; import org.apache.hadoop.yarn.proto.YarnTensorflowClusterProtos.GetClusterSpecResponseProtoOrBuilder; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; public class GetClusterSpecResponsePBImpl extends GetClusterSpecResponse { GetClusterSpecResponseProto proto = GetClusterSpecResponseProto.getDefaultInstance(); diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/TensorflowCluster.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/TensorflowCluster.proto similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/TensorflowCluster.proto rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/TensorflowCluster.proto diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/yarn_tensorflow_cluster_protos.proto similarity index 100% rename from hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/src/main/proto/yarn_tensorflow_cluster_protos.proto rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/proto/yarn_tensorflow_cluster_protos.proto diff --git a/hadoop-yarn-project/hadoop-yarn/pom.xml b/hadoop-yarn-project/hadoop-yarn/pom.xml index 4386adf58a8f4..c92929210c087 100644 --- a/hadoop-yarn-project/hadoop-yarn/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/pom.xml @@ -237,6 +237,5 @@ 
hadoop-yarn-client hadoop-yarn-registry hadoop-yarn-ui - tensorflow-rpc diff --git a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml b/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml deleted file mode 100644 index 33c5ecbcc225e..0000000000000 --- a/hadoop-yarn-project/hadoop-yarn/tensorflow-rpc/pom.xml +++ /dev/null @@ -1,82 +0,0 @@ - - - - - hadoop-yarn - org.apache.hadoop - 3.0.0-alpha2-SNAPSHOT - - 4.0.0 - tensorflow-rpc - 3.0.0-alpha2-SNAPSHOT - Apache Hadoop YARN TFRPC - - - - ${project.parent.basedir} - true - ../dev-support - - - - - org.apache.hadoop - hadoop-common - provided - - - - org.apache.hadoop - hadoop-yarn-common - - - - - - - org.apache.hadoop - hadoop-maven-plugins - - - compile-protoc - - protoc - - - ${protobuf.version} - ${protoc.path} - - ${basedir}/../../../hadoop-common-project/hadoop-common/src/main/proto - ${basedir}/../../../hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto - ${basedir}/../hadoop-yarn-api/src/main/proto - ${basedir}/src/main/proto - - - ${basedir}/src/main/proto - - yarn_tensorflow_cluster_protos.proto - TensorflowCluster.proto - - - - - - - - - From b9e391adc32fc7b18951a2770e02665680b5a753 Mon Sep 17 00:00:00 2001 From: huafengw Date: Tue, 27 Dec 2016 10:15:02 +0800 Subject: [PATCH 10/32] remove AbstractService --- .../pom.xml | 2 +- .../tensorflow/api/TensorflowClusterImpl.java | 20 ++++++------------- 2 files changed, 7 insertions(+), 15 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml index 4212120bb01f0..051418ada29c0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/pom.xml @@ -44,7 +44,7 @@ org.apache.hadoop hadoop-common - provided + diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java index 1bd9d11f40ef5..980f0e7caa7bb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java @@ -19,7 +19,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.Server; -import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; @@ -31,19 +30,15 @@ import java.io.IOException; import java.net.InetSocketAddress; -public class TensorflowClusterImpl extends AbstractService implements TensorflowCluster { +public class TensorflowClusterImpl implements TensorflowCluster { private Server server; private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); - public TensorflowClusterImpl() { - super(TensorflowClusterImpl.class.getName()); - } + public TensorflowClusterImpl() {} - @Override - protected void serviceStart() throws Exception { - super.serviceStart(); - Configuration conf = getConfig(); + public void start() throws Exception { + Configuration conf = new Configuration(); YarnRPC rpc = YarnRPC.create(conf); InetSocketAddress address = new InetSocketAddress("localhost", 9001); this.server = rpc.getServer( @@ -61,17 +56,14 @@ public GetClusterSpecResponse 
getClusterSpec(GetClusterSpecRequest request) thro return response; } - @Override - protected void serviceStop() throws Exception { + public void stop() throws Exception { if (this.server != null) { this.server.stop(); } - super.serviceStop(); } - public static void main(String[] args) { + public static void main(String[] args) throws Exception { TensorflowClusterImpl server = new TensorflowClusterImpl(); - server.init(new Configuration()); server.start(); } } From 3750d6c0fda296d6d924143e89aff03229aef1ca Mon Sep 17 00:00:00 2001 From: muzhongz Date: Tue, 27 Dec 2016 12:32:34 +0800 Subject: [PATCH 11/32] APP: change options for client and appmaster --- .../tensorflow/ApplicationMaster.java | 1 + .../yarn/applications/tensorflow/Client.java | 5 ++- .../applications/tensorflow/DSConstants.java | 2 ++ .../tensorflow/TFApplication.java | 5 +++ .../applications/tensorflow/TFClient.java | 32 +++++++++++++++++-- 5 files changed, 40 insertions(+), 5 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 96a92afd96b87..a1b1e2c4b2613 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -410,6 +410,7 @@ public boolean init(String[] args) throws ParseException, IOException { throw new IllegalArgumentException( "Cannot run distributed shell with no containers"); } + numTotalWokerContainers = numTotalContainers; requestPriority = 
Integer.parseInt(cliParser .getOptionValue("priority", "0")); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 081c3892455f7..231882b95f36d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -603,7 +603,6 @@ public boolean run() throws IOException, YarnException { /** * Monitor the submitted application for completion. - * Kill application if time expires. * @param appId Application Id of application to be monitored * @return true if application completed successfully * @throws YarnException @@ -659,11 +658,11 @@ else if (YarnApplicationState.KILLED == state return false; } - if (System.currentTimeMillis() > (clientStartTime + clientTimeout)) { +/* if (System.currentTimeMillis() > (clientStartTime + clientTimeout)) { LOG.info("Reached client specified timeout for application. 
Killing application"); forceKillApplication(appId); return false; - } + }*/ } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java index f254fdf8faabe..374ac2bade4d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java @@ -51,4 +51,6 @@ public class DSConstants { public static final String DISTRIBUTEDSHELLTIMELINEDOMAIN = "DISTRIBUTEDSHELLTIMELINEDOMAIN"; public static final String APP_NAME = "tenseoflow"; + + public static final int INVALID_TCP_PORT = -1; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java index f1b4cd72050f3..963ea397bbe9d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -8,4 +8,9 @@ public class TFApplication { public static final String OPT_TF_CLIENT_PY = "tf_clientpy"; public static final String OPT_TF_SERVER_JAR = 
"tf_serverjar"; public static final String OPT_TF_SERVER_PY = "tf_serverpy"; + + + public static String makeOption(String opt, String val) { + return "--" + opt + " " + val; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java index 3b26566dfba25..d8dfb20084568 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -6,6 +6,7 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.util.Vector; /** * Created by muzhongz on 16-11-30. 
@@ -16,7 +17,13 @@ public class TFClient implements Runnable { public static final String TF_CLIENT_PY = "tf_client.py"; private String tfClientPy; private String tfMasterAddress; - private int tfMasterPort; + private int tfMasterPort = DSConstants.INVALID_TCP_PORT; + private String currentDirectory; + private static final String OPT_MASTER_ADDRESS = "ma"; + private static final String OPT_MASTER_PORT = "mp"; + + + private void execCmd(String cmd) { Process process = null; @@ -54,10 +61,31 @@ public TFClient(String tfClientPy) { this.tfClientPy = tfClientPy; } + private String makeOption(String opt, String val) { + + if (opt == null || opt.equals("")) { + return ""; + } + + String lead = "--"; + + if (val == null) { + lead += opt; + return lead; + } + + lead += opt; + lead += " "; + lead += val; + return lead; + } + + @Override public void run() { execCmd("ls -l"); - if (tfMasterAddress == null || tfMasterPort == 0) { + + if (tfMasterAddress == null || tfMasterPort == DSConstants.INVALID_TCP_PORT) { LOG.fatal("invalid master address!"); execCmd("python " + tfClientPy); } else { From ea04b4aff589e02227da5c94be4435cf24c36bde Mon Sep 17 00:00:00 2001 From: muzhongz Date: Tue, 27 Dec 2016 17:20:01 +0800 Subject: [PATCH 12/32] RPC: integrate client-am rpc --- .../tensorflow/ApplicationMaster.java | 53 +++++++------ .../yarn/applications/tensorflow/Client.java | 43 +++++++++- .../applications/tensorflow/ClusterSpec.java | 5 ++ .../tensorflow/TFApplication.java | 8 ++ .../tensorflow/TFApplicationRpc.java | 12 +++ .../tensorflow/TFApplicationRpcClient.java | 50 ++++++++++++ .../tensorflow/TFApplicationRpcServer.java | 78 +++++++++++++++++++ .../applications/tensorflow/TFClient.java | 21 ++++- .../applications/tensorflow/TFContainer.java | 2 +- .../tensorflow/TFMasterAddress.java | 25 ++++++ ...{DSConstants.java => TFYarnConstants.java} | 2 +- 11 files changed, 266 insertions(+), 33 deletions(-) create mode 100644 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/{DSConstants.java => TFYarnConstants.java} (98%) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index a1b1e2c4b2613..d14753be2ba97 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -24,9 +24,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.StringReader; -import java.lang.reflect.UndeclaredThrowableException; import java.nio.ByteBuffer; -import java.security.PrivilegedExceptionAction; 
import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -57,27 +55,21 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.Shell; +import org.apache.hadoop.util.SysInfo; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.*; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest; -import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync; import org.apache.hadoop.yarn.client.api.async.NMClientAsync; import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; -import org.apache.hadoop.yarn.util.timeline.TimelineUtils; import org.apache.log4j.LogManager; import com.google.common.annotations.VisibleForTesting; -import com.sun.jersey.api.client.ClientHandlerException; @InterfaceAudience.Public @@ -219,6 +211,8 @@ public ByteBuffer getAllTokens() { private int yarnShellIdCounter = 1; private String tfClientPy; + private ClusterSpec clusterSpec; + private String appName; public String getAppName() {return appName;} @@ -442,6 +436,8 @@ public boolean init(String[] args) throws ParseException, IOException { tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR); } + clusterSpec = ClusterSpec.makeClusterSpec(); + return true; } @@ 
-454,6 +450,21 @@ private void printUsage(Options opts) { new HelpFormatter().printHelp("ApplicationMaster", opts); } + private final class RpcForClient implements TFApplicationRpc { + + @Override + public String getClusterSpec() throws IOException, YarnException { + if (clusterSpec != null && clusterSpec.getMasterNode() != null) { + TFServerAddress server = clusterSpec.getMasterNode(); + String master = server.getAddress() + ":" + server.getPort(); + return master; + } + + return ""; + } + } + + /** * Main run function for the application master * @@ -506,9 +517,15 @@ public void run() throws YarnException, IOException, InterruptedException { // TODO use the rpc port info to register with the RM for the client to // send requests to this app master + appMasterHostname = System.getenv(Environment.NM_HOST.name()); + LOG.info("rpc server address <" + appMasterHostname + ">"); + TFApplicationRpcServer rpcServer = new TFApplicationRpcServer(appMasterHostname, new RpcForClient()); + appMasterRpcPort = rpcServer.getRpcPort(); + rpcServer.startRpcServiceThread(); + // Register self with ResourceManager // This will start heartbeating to the RM - appMasterHostname = NetUtils.getHostname(); + RegisterApplicationMasterResponse response = amRMClient .registerApplicationMaster(appMasterHostname, appMasterRpcPort, appMasterTrackingUrl); @@ -559,20 +576,6 @@ public void run() throws YarnException, IOException, InterruptedException { } numRequestedContainers.set(numTotalContainers); - //createTFClientThread(); - } - - private void createTFClientThread() { - - Thread thread = new Thread(new Runnable() { - @Override - public void run() { - LOG.info("tensorflow client will be run!"); - TFClient tfClient = new TFClient(TFClient.TF_CLIENT_PY); - tfClient.run(); - } - }); - thread.start(); } @VisibleForTesting @@ -642,7 +645,7 @@ protected boolean finish() { public boolean startAllContainers() throws Exception { if (numAllocatedContainers.get() == numTotalContainers) { - ClusterSpec 
clusterSpec = ClusterSpec.makeClusterSpec(); + int numWorkerContainers = 0; int numPsContainers = 0; if (this.allocatedContainers.size() < numTotalWokerContainers + numTotalParamServerContainer) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 231882b95f36d..722bf642ac489 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -140,6 +140,8 @@ public String getAppName() { private String flowVersion = null; private long flowRunId = 0L; + private String masterAddress; + // Command line options private Options opts; @@ -154,6 +156,8 @@ public String getAppName() { //public static final String OPT_TF_CLIENT = "tf_client"; //public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; + private TFApplicationRpc appRpc = null; + /** * @param args Command line arguments */ @@ -588,9 +592,7 @@ public boolean run() throws IOException, YarnException { yarnClient.submitApplication(appContext); - // run tensorflow client python script - TFClient tfClient = new TFClient(tfClientPy); - tfClient.startTensorflowClient(); + // TODO // Try submitting the same request again @@ -601,6 +603,22 @@ public boolean run() throws IOException, YarnException { } + private void tensorflowServersReady() { + while (true) { + + + try { + Thread.sleep(100); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + private boolean isEmptyString(String s) { + return s == null || 
s.equals(""); + } + /** * Monitor the submitted application for completion. * @param appId Application Id of application to be monitored @@ -638,6 +656,25 @@ private boolean monitorApplication(ApplicationId appId) YarnApplicationState state = report.getYarnApplicationState(); FinalApplicationStatus tfStatus = report.getFinalApplicationStatus(); + + if (YarnApplicationState.RUNNING == state) { + if (appRpc == null) { + String hostname = report.getHost(); + int port = report.getRpcPort(); + LOG.info("application master rpc host: " + hostname + "; port: " + port); + appRpc = new TFApplicationRpcClient(hostname, port).getRpc(); + } + + if (appRpc != null && isEmptyString(masterAddress)) { + masterAddress = appRpc.getClusterSpec(); + LOG.info("master node address is " + masterAddress); + if (!isEmptyString(masterAddress)) { + TFClient tfClient = new TFClient(tfClientPy); + tfClient.startTensorflowClient(masterAddress); + } + } + } + if (YarnApplicationState.FINISHED == state) { if (FinalApplicationStatus.SUCCEEDED == tfStatus) { LOG.info("Application has completed successfully. 
Breaking monitoring loop"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java index 24081eeee5476..629006a89e305 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java @@ -75,6 +75,11 @@ public void addPsSpec(String containerId, String hostName) { this.paramServers.put(containerId, server); } + public TFServerAddress getMasterNode() { + return tfMasterNode; + } + + public String getMasterNodeAddress() { if (tfMasterNode == null) { return null; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java index 963ea397bbe9d..689cee7c30ab1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -13,4 +13,12 @@ public class TFApplication { public static String makeOption(String opt, String val) { return "--" + opt + " " + val; } + + public static TFApplicationRpc 
getRpc() { + return null; + } + + public static boolean startRpcService() { + return true; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java new file mode 100644 index 0000000000000..9a39fc580e467 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java @@ -0,0 +1,12 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.hadoop.yarn.exceptions.YarnException; + +import java.io.IOException; + +/** + * Created by muzhongz on 16-12-27. + */ +public interface TFApplicationRpc { + public String getClusterSpec() throws IOException, YarnException; +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java new file mode 100644 index 0000000000000..df21577471026 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java @@ -0,0 +1,50 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.retry.RetryPolicy; +import org.apache.hadoop.io.retry.RetryProxy; +import org.apache.hadoop.yarn.applications.tensorflow.api.*; +import 
org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.client.RMProxy; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.factories.RecordFactory; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; + +import java.io.IOException; +import java.net.InetSocketAddress; + +/** + * Created by muzhongz on 16-12-27. + */ +public class TFApplicationRpcClient implements TFApplicationRpc { + private String serverAddress; + private int serverPort; + private RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); + private TensorflowCluster tensorflow; + + public TFApplicationRpcClient(String serverAddress, int serverPort) { + this.serverAddress = serverAddress; + this.serverPort = serverPort; + } + + public String getClusterSpec() throws IOException, YarnException { + GetClusterSpecResponse response = + this.tensorflow.getClusterSpec(recordFactory.newRecordInstance(GetClusterSpecRequest.class)); + return response.getClusterSpec(); + } + + public TFApplicationRpc getRpc() { + InetSocketAddress address = new InetSocketAddress(serverAddress, serverPort); + Configuration conf = new Configuration(); + RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf, false); + try { + TensorflowCluster proxy = RMProxy.createRMProxy(conf, TensorflowCluster.class, address); + this.tensorflow = (TensorflowCluster) RetryProxy.create( + TensorflowCluster.class, proxy, retryPolicy); + return this; + } catch (IOException e) { + return null; + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java new file mode 100644 index 0000000000000..ea027668b2c97 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java @@ -0,0 +1,78 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.ipc.Server; +import org.apache.hadoop.util.ThreadUtil; +import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowCluster; +import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowClusterImpl; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; +import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.factories.RecordFactory; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.ipc.YarnRPC; + +import java.io.IOException; +import java.net.InetSocketAddress; + +/** + * Created by muzhongz on 16-12-27. 
+ */ +public class TFApplicationRpcServer implements TensorflowCluster, Runnable { + private int rpcPort = -1; + private String rpcAddress = null; + + public int getRpcPort() { + return rpcPort; + } + + public void setRpcPort(int rpcPort) { + this.rpcPort = rpcPort; + } + + public String getRpcAddress() { + return rpcAddress; + } + + public void setRpcAddress(String rpcAddress) { + this.rpcAddress = rpcAddress; + } + + private static final RecordFactory recordFactory = + RecordFactoryProvider.getRecordFactory(null); + + private TFApplicationRpc appRpc = null; + private Server server; + + public TFApplicationRpcServer(String hostname, TFApplicationRpc rpc) { + this.setRpcAddress(hostname); + this.setRpcPort(10000 + ((int)(Math.random() * (5000)) + 1)); + this.appRpc = rpc; + } + + @Override + public GetClusterSpecResponse getClusterSpec(GetClusterSpecRequest request) throws YarnException, IOException { + GetClusterSpecResponse response = recordFactory.newRecordInstance(GetClusterSpecResponse.class); + response.setClusterSpec(this.appRpc.getClusterSpec()); + return response; + } + + public void startRpcServiceThread() { + Thread thread = new Thread(this); + thread.start(); + } + + @Override + public void run() { + Configuration conf = new Configuration(); + YarnRPC rpc = YarnRPC.create(conf); + InetSocketAddress address = new InetSocketAddress(rpcAddress, rpcPort); + this.server = rpc.getServer( + TensorflowCluster.class, this, address, conf, null, + conf.getInt(YarnConfiguration.RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT, + YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT)); + + this.server.start(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java 
index d8dfb20084568..73c511c156a6b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -6,7 +6,6 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; -import java.util.Vector; /** * Created by muzhongz on 16-11-30. @@ -17,7 +16,7 @@ public class TFClient implements Runnable { public static final String TF_CLIENT_PY = "tf_client.py"; private String tfClientPy; private String tfMasterAddress; - private int tfMasterPort = DSConstants.INVALID_TCP_PORT; + private int tfMasterPort = TFYarnConstants.INVALID_TCP_PORT; private String currentDirectory; private static final String OPT_MASTER_ADDRESS = "ma"; private static final String OPT_MASTER_PORT = "mp"; @@ -85,7 +84,7 @@ private String makeOption(String opt, String val) { public void run() { execCmd("ls -l"); - if (tfMasterAddress == null || tfMasterPort == DSConstants.INVALID_TCP_PORT) { + if (tfMasterAddress == null || tfMasterPort == TFYarnConstants.INVALID_TCP_PORT) { LOG.fatal("invalid master address!"); execCmd("python " + tfClientPy); } else { @@ -98,6 +97,22 @@ public void startTensorflowClient() { thread.start(); } + public void startTensorflowClient(String masterNode) { + if (masterNode == null || masterNode.equals("")) { + return; + } + Thread thread = new Thread(new Runnable() { + @Override + public void run() { + String cmd = "python " + tfClientPy + " " + masterNode; + LOG.info("TF client command is [" + cmd + "]"); + execCmd(cmd); + } + }); + thread.start(); + + } + public void tensorflowServersReady(boolean ready, String masterAddress, int port) { LOG.info("tf master : " + masterAddress + ":" + port); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index d909a78384617..f9ec4b3ee3d39 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -25,7 +25,7 @@ public class TFContainer { private static final Log LOG = LogFactory.getLog(TFContainer.class); - private String appName = DSConstants.APP_NAME; + private String appName = TFYarnConstants.APP_NAME; private ApplicationMaster appMaster; public static final String SERVER_PY_PATH = "tf_server.py"; public static final String SERVER_JAR_PATH = "TFServer.jar"; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java new file mode 100644 index 0000000000000..9f07f0569af1b --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java @@ -0,0 +1,25 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +/** + * Created by muzhongz on 16-12-27. 
+ */ +public class TFMasterAddress { + private String address; + private int port; + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java similarity index 98% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java index 374ac2bade4d2..1de1ee2c2f655 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/DSConstants.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java @@ -26,7 +26,7 @@ */ @InterfaceAudience.Public @InterfaceStability.Unstable -public class DSConstants { +public class TFYarnConstants { /** * Environment key name pointing to the shell script's location From 5e1a71f721173b5a0f9905d3135c4feacee6964d Mon Sep 17 00:00:00 2001 From: muzhongz Date: Tue, 27 Dec 2016 17:26:17 +0800 Subject: [PATCH 13/32] PM: rm useless files --- .../applications/tensorflow/api/TFClient.java | 55 --------------- .../tensorflow/api/TensorflowClusterImpl.java | 69 ------------------- 2 files 
changed, 124 deletions(-) delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java delete mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java deleted file mode 100644 index 0c62ce688832b..0000000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TFClient.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.applications.tensorflow.api; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.retry.RetryPolicy; -import org.apache.hadoop.io.retry.RetryProxy; -import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; -import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; -import org.apache.hadoop.yarn.client.RMProxy; -import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; - -import java.io.IOException; -import java.net.InetSocketAddress; - -public class TFClient { - private RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); - private TensorflowCluster tensorflow; - - public TFClient(Configuration conf, InetSocketAddress server) throws IOException { - RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf, false); - TensorflowCluster proxy = RMProxy.createRMProxy(conf, TensorflowCluster.class, server); - this.tensorflow = (TensorflowCluster) RetryProxy.create( - TensorflowCluster.class, proxy, retryPolicy); - } - - public String getClusterSpec() throws IOException, YarnException { - GetClusterSpecResponse response = - this.tensorflow.getClusterSpec(recordFactory.newRecordInstance(GetClusterSpecRequest.class)); - return response.getClusterSpec(); - } - - public static void main(String[] args) throws IOException, YarnException { - InetSocketAddress address = new InetSocketAddress("localhost", 9001); - TFClient client = new TFClient(new Configuration(), address); - System.out.println(client.getClusterSpec()); - } -} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java deleted file mode 100644 index 980f0e7caa7bb..0000000000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/api/TensorflowClusterImpl.java +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.yarn.applications.tensorflow.api; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.Server; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.apache.hadoop.yarn.ipc.YarnRPC; -import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; -import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; - -import java.io.IOException; -import java.net.InetSocketAddress; - -public class TensorflowClusterImpl implements TensorflowCluster { - private Server server; - private static final RecordFactory recordFactory = - RecordFactoryProvider.getRecordFactory(null); - - public TensorflowClusterImpl() {} - - public void start() throws Exception { - Configuration conf = new Configuration(); - YarnRPC rpc = YarnRPC.create(conf); - InetSocketAddress address = new InetSocketAddress("localhost", 9001); - this.server = rpc.getServer( - TensorflowCluster.class, this, address, conf, null, - conf.getInt(YarnConfiguration.RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT, - YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT)); - - this.server.start(); - } - - @Override - public GetClusterSpecResponse getClusterSpec(GetClusterSpecRequest request) throws YarnException, IOException { - GetClusterSpecResponse response = recordFactory.newRecordInstance(GetClusterSpecResponse.class); - response.setClusterSpec("this is a test"); - return response; - } - - public void stop() throws Exception { - if (this.server != null) { - this.server.stop(); - } - } - - public static void main(String[] args) throws Exception { - TensorflowClusterImpl server = new TensorflowClusterImpl(); - server.start(); - } -} From 490c0e842f3a4aa548ba57b55115916e80e0f463 Mon Sep 17 00:00:00 
2001 From: muzhongz Date: Wed, 28 Dec 2016 16:11:15 +0800 Subject: [PATCH 14/32] ClIENT, AM: optimize the options in client side and app master side --- .../tensorflow/ApplicationMaster.java | 107 ++---------- .../yarn/applications/tensorflow/Client.java | 161 ++---------------- .../applications/tensorflow/ClusterSpec.java | 38 ++++- .../tensorflow/ClusterSpecException.java | 27 +++ .../tensorflow/LaunchContainerThread.java | 31 +--- .../tensorflow/TFAmContainer.java | 9 +- .../tensorflow/TFApplication.java | 14 +- .../tensorflow/TFApplicationRpcServer.java | 1 - .../applications/tensorflow/TFContainer.java | 1 - .../tensorflow/TFYarnConstants.java | 22 --- 10 files changed, 108 insertions(+), 303 deletions(-) create mode 100644 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index d14753be2ba97..7d0cae8e23804 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -78,20 +78,6 @@ public class ApplicationMaster { private static final Log LOG = LogFactory.getLog(ApplicationMaster.class); - @VisibleForTesting - @Private - public static enum DSEvent { - DS_APP_ATTEMPT_START, DS_APP_ATTEMPT_END, DS_CONTAINER_START, DS_CONTAINER_END - } - - @VisibleForTesting - 
@Private - public static enum DSEntity { - DS_APP_ATTEMPT, DS_CONTAINER - } - - private static final String YARN_SHELL_ID = "YARN_SHELL_ID"; - // Configuration private Configuration conf; @@ -132,10 +118,6 @@ public ApplicationAttemptId getAppAttempId() { // Tracking url to which app master publishes info for clients to monitor private String appMasterTrackingUrl = ""; -/* private boolean timelineServiceV2 = false;*/ - - // App Master configuration - // No. of containers to run shell command on @VisibleForTesting protected int numTotalContainers = 1; // Memory to request for the container on which the shell command will run @@ -170,24 +152,12 @@ public ApplicationAttemptId getAppAttempId() { private int containerMaxRetries = 0; private int containrRetryInterval = 0; - // Timeline domain ID - private String domainId = null; - // TF server jar file private String tfServerJar = ""; - // Hardcoded path to shell script in launch container's local env - private static final String EXEC_SHELL_STRING_PATH = Client.SCRIPT_PATH - + ".sh"; - private static final String EXEC_BAT_SCRIPT_STRING_PATH = Client.SCRIPT_PATH - + ".bat"; - // Hardcoded path to custom log_properties private static final String log4jPath = "log4j.properties"; - private static final String shellCommandPath = "shellCommands"; - private static final String shellArgsPath = "shellArgs"; - private volatile boolean done; private ByteBuffer allTokens; @@ -198,24 +168,10 @@ public ByteBuffer getAllTokens() { // Launch threads private List launchThreads = new ArrayList(); - // Timeline Client -/* @VisibleForTesting - TimelineClient timelineClient; - static final String CONTAINER_ENTITY_GROUP_ID = "CONTAINERS"; - static final String APPID_TIMELINE_FILTER_NAME = "appId"; - static final String USER_TIMELINE_FILTER_NAME = "user";*/ - - private final String linux_bash_command = "bash"; - private final String windows_command = "cmd /c"; - private int yarnShellIdCounter = 1; - private String tfClientPy; private 
ClusterSpec clusterSpec; - private String appName; - public String getAppName() {return appName;} - @VisibleForTesting protected final Set launchedContainers = Collections.newSetFromMap(new ConcurrentHashMap()); @@ -302,8 +258,6 @@ public boolean init(String[] args) throws ParseException, IOException { "Amount of memory in MB to be requested to run the shell command"); opts.addOption("container_vcores", true, "Amount of virtual cores to be requested to run the shell command"); - opts.addOption("num_containers", true, - "No. of containers on which the shell command needs to be executed"); opts.addOption("priority", true, "Application Priority. Default 0"); opts.addOption("container_retry_policy", true, "Retry policy when container fails to run, " @@ -317,13 +271,11 @@ public boolean init(String[] args) throws ParseException, IOException { "If container could retry, it specifies max retires"); opts.addOption("container_retry_interval", true, "Interval between each retry, unit is milliseconds"); - opts.addOption("debug", false, "Dump out debug information"); - - opts.addOption("help", false, "Print usage"); opts.addOption("jar", true, "Jar file containing the tensorflow server"); - opts.addOption(TFApplication.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, "Provide container jar of tensorflow"); + opts.addOption(TFApplication.OPT_TF_WORKER_NUM, true, "Provide worker server number of tensorflow"); + opts.addOption(TFApplication.OPT_TF_PS_NUM, true, "Provide ps server number of tensorflow"); LOG.info("print args"); for (String arg : args) { LOG.info(arg); @@ -347,15 +299,6 @@ public boolean init(String[] args) throws ParseException, IOException { } } - if (cliParser.hasOption("help")) { - printUsage(opts); - return false; - } - - if (cliParser.hasOption("debug")) { - dumpOutDebugInfo(); - } - Map envs = System.getenv(); if (!envs.containsKey(Environment.CONTAINER_ID.name())) { @@ -398,13 +341,23 @@ 
public boolean init(String[] args) throws ParseException, IOException { "container_memory", "10")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue( "container_vcores", "1")); - numTotalContainers = Integer.parseInt(cliParser.getOptionValue( - "num_containers", "1")); + + + numTotalWokerContainers = Integer.parseInt(cliParser.getOptionValue( + TFApplication.OPT_TF_WORKER_NUM, "1")); + if (numTotalWokerContainers == 0) { + throw new IllegalArgumentException( + "Cannot run tensroflow application with no worker containers"); + } + + numTotalParamServerContainer = Integer.parseInt(cliParser.getOptionValue( + TFApplication.OPT_TF_PS_NUM, "0")); + numTotalContainers = numTotalWokerContainers + numTotalParamServerContainer; if (numTotalContainers == 0) { throw new IllegalArgumentException( "Cannot run distributed shell with no containers"); } - numTotalWokerContainers = numTotalContainers; + requestPriority = Integer.parseInt(cliParser .getOptionValue("priority", "0")); @@ -423,20 +376,11 @@ public boolean init(String[] args) throws ParseException, IOException { containrRetryInterval = Integer.parseInt(cliParser.getOptionValue( "container_retry_interval", "0")); - if (cliParser.hasOption(TFApplication.OPT_TF_CLIENT)) { - - tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT); - if (null == tfClientPy) { - LOG.error("tf_client is empty!"); - tfClientPy = TFClient.TF_CLIENT_PY; - } - } - if (cliParser.hasOption(TFApplication.OPT_TF_SERVER_JAR)) { tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR); } - clusterSpec = ClusterSpec.makeClusterSpec(); + clusterSpec = ClusterSpec.makeClusterSpec(numTotalWokerContainers, numTotalParamServerContainer); return true; } @@ -464,7 +408,6 @@ public String getClusterSpec() throws IOException, YarnException { } } - /** * Main run function for the application master * @@ -511,14 +454,7 @@ public void run() throws YarnException, IOException, InterruptedException { nmClientAsync.init(conf); 
nmClientAsync.start(); - // Setup local RPC Server to accept status requests directly from clients - // TODO need to setup a protocol for client to be able to communicate to - // the RPC server - // TODO use the rpc port info to register with the RM for the client to - // send requests to this app master - appMasterHostname = System.getenv(Environment.NM_HOST.name()); - LOG.info("rpc server address <" + appMasterHostname + ">"); TFApplicationRpcServer rpcServer = new TFApplicationRpcServer(appMasterHostname, new RpcForClient()); appMasterRpcPort = rpcServer.getRpcPort(); rpcServer.startRpcServiceThread(); @@ -616,7 +552,6 @@ protected boolean finish() { FinalApplicationStatus appStatus; String appMessage = null; boolean success = true; - LOG.info(">>>>>>>>>>>>>>>>>>>>>numCompletedContainers: " + numCompletedContainers.get() + "numFailedContainers: " + numFailedContainers.get() + "total " + numTotalContainers); if (numCompletedContainers.get() - numFailedContainers.get() >= numTotalContainers) { appStatus = FinalApplicationStatus.SUCCEEDED; @@ -671,11 +606,8 @@ public boolean startAllContainers() throws Exception { for (Container allocatedContainer : this.allocatedContainers) { - String yarnShellId = Integer.toString(yarnShellIdCounter); - yarnShellIdCounter++; - LOG.info("Launching shell command on a new container." + LOG.info("Launching a new container." 
+ ", containerId=" + allocatedContainer.getId() - + ", yarnShellId=" + yarnShellId + ", containerNode=" + allocatedContainer.getNodeId().getHost() + ":" + allocatedContainer.getNodeId().getPort() + ", containerNodeURI=" + allocatedContainer.getNodeHttpAddress() @@ -688,7 +620,6 @@ public boolean startAllContainers() throws Exception { LOG.info("server cid: " + allocatedContainer.getId().toString()); LaunchContainerThread runnable = new LaunchContainerThread(allocatedContainer, - yarnShellId, tfServerJar, containerMemory, containerRetryPolicy, @@ -898,8 +829,6 @@ public void onIncreaseContainerResourceError( } - - /** * Setup the request that will be sent to the RM for the container ask. * @@ -919,7 +848,7 @@ private ContainerRequest setupContainerAskForRM() { ContainerRequest request = new ContainerRequest(capability, null, null, pri); - LOG.info("Requested container ask: " + request.toString()); + //LOG.info("Requested container ask: " + request.toString()); return request; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 722bf642ac489..069e7e50b1ee0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -44,37 +44,6 @@ import java.nio.ByteBuffer; import java.util.*; -/** - * Client for Distributed Shell application submission to YARN. - * - *

The distributed shell client allows an application master to be launched that in turn would run - * the provided shell command on a set of containers.

- * - *

This client is meant to act as an example on how to write yarn-based applications.

- * - *

To submit an application, a client first needs to connect to the ResourceManager - * aka ApplicationsManager or ASM via the {@link ApplicationClientProtocol}. The {@link ApplicationClientProtocol} - * provides a way for the client to get access to cluster information and to request for a - * new {@link ApplicationId}.

- * - *

For the actual job submission, the client first has to create an {@link ApplicationSubmissionContext}. - * The {@link ApplicationSubmissionContext} defines the application details such as {@link ApplicationId} - * and application name, the priority assigned to the application and the queue - * to which this application needs to be assigned. In addition to this, the {@link ApplicationSubmissionContext} - * also defines the {@link ContainerLaunchContext} which describes the Container with which - * the {@link ApplicationMaster} is launched.

- * - *

The {@link ContainerLaunchContext} in this scenario defines the resources to be allocated for the - * {@link ApplicationMaster}'s container, the local resources (jars, configuration files) to be made available - * and the environment to be set for the {@link ApplicationMaster} and the commands to be executed to run the - * {@link ApplicationMaster}.

- * - *

Using the {@link ApplicationSubmissionContext}, the client submits the application to the - * ResourceManager and then monitors the application by requesting the ResourceManager - * for an {@link ApplicationReport} at regular time intervals. In case of the application taking too long, the client - * kills the application by submitting a {@link KillApplicationRequest} to the ResourceManager.

- * - */ @InterfaceAudience.Public @InterfaceStability.Unstable public class Client { @@ -85,7 +54,7 @@ public class Client { private Configuration conf; private YarnClient yarnClient; // Application master specific info to register a new Application with RM/ASM - private String appName = ""; + private String appName = TFYarnConstants.APP_NAME; public String getAppName() { return appName; } @@ -113,48 +82,27 @@ public String getAppName() { private int containerMemory = 10; // Amt. of virtual cores to request for container in which shell script will be executed private int containerVirtualCores = 1; - // No. of containers in which the shell script needs to be executed - private int numContainers = 1; + private String nodeLabelExpression = null; // log4j.properties file // if available, add to local resources and set into classpath private String log4jPropFile = ""; - // Start time for client - private final long clientStartTime = System.currentTimeMillis(); - // Timeout threshold for client. Kill app after time interval expires. 
- private long clientTimeout = 600000; - private long attemptFailuresValidityInterval = -1; private Vector containerRetryOptions = new Vector<>(5); - // Debug flag - boolean debugFlag = false; - - // Flag to indicate whether to create the domain of the given ID - private boolean toCreateDomain = false; - - private String flowName = null; - private String flowVersion = null; - private long flowRunId = 0L; - private String masterAddress; // Command line options private Options opts; - private static final String shellCommandPath = "shellCommands"; - private static final String shellArgsPath = "shellArgs"; - private static final String appMasterJarPath = "AppMaster.jar"; // Hardcoded path to custom log_properties private static final String log4jPath = "log4j.properties"; - public static final String SCRIPT_PATH = "ExecScript"; - - //public static final String OPT_TF_CLIENT = "tf_client"; - //public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; + private int workerNum; + private int psNum; private TFApplicationRpc appRpc = null; @@ -174,7 +122,6 @@ public static void main(String[] args) { } } catch (IllegalArgumentException e) { System.err.println(e.getLocalizedMessage()); - client.printUsage(); System.exit(-1); } result = client.run(); @@ -207,13 +154,11 @@ public Client(Configuration conf) throws Exception { opts.addOption("appname", true, "Application Name. Default value - tensorflow"); opts.addOption("priority", true, "Application Priority. 
Default 0"); opts.addOption("queue", true, "RM Queue in which this application is to be submitted"); - opts.addOption("timeout", true, "Application timeout in milliseconds"); + opts.addOption("jar", true, "Jar file containing the application master"); opts.addOption("master_memory", true, "Amount of memory in MB to be requested to run the application master"); opts.addOption("master_vcores", true, "Amount of virtual cores to be requested to run the application master"); - opts.addOption("jar", true, "Jar file containing the application master"); opts.addOption("container_memory", true, "Amount of memory in MB to be requested to run a tensorflow worker"); opts.addOption("container_vcores", true, "Amount of virtual cores to be requested to run a tensorflow worker"); - opts.addOption("num_containers", true, "No. of containers on which tensorflow workers to run"); opts.addOption("log_properties", true, "log4j.properties file"); opts.addOption("attempt_failures_validity_interval", true, "when attempt_failures_validity_interval in milliseconds is set to > 0," + @@ -221,16 +166,6 @@ public Client(Configuration conf) throws Exception { "the validityInterval into failure count. 
" + "If failure count reaches to maxAppAttempts, " + "the application will be failed."); - opts.addOption("debug", false, "Dump out debug information"); - opts.addOption("create", false, "Flag to indicate whether to create the " - + "domain specified with -domain."); - opts.addOption("flow_name", true, "Flow name which the distributed shell " - + "app belongs to"); - opts.addOption("flow_version", true, "Flow version which the distributed " - + "shell app belongs to"); - opts.addOption("flow_run_id", true, "Flow run ID which the distributed " - + "shell app belongs to"); - opts.addOption("help", false, "Print usage"); opts.addOption("node_label_expression", true, "Node label expression to determine the nodes" + " where all the containers of this application" @@ -255,6 +190,10 @@ public Client(Configuration conf) throws Exception { "Provide server jar of tensorflow"); opts.addOption(TFApplication.OPT_TF_SERVER_PY, true, "Provide server pyscript of tensorflow"); + opts.addOption(TFApplication.OPT_TF_WORKER_NUM, true, + "worker quantity of tensorflow"); + opts.addOption(TFApplication.OPT_TF_PS_NUM, true, + "ps quantity of tensorflow"); } /** @@ -263,13 +202,6 @@ public Client() throws Exception { this(new YarnConfiguration()); } - /** - * Helper function to print out usage - */ - private void printUsage() { - new HelpFormatter().printHelp("Client", opts); - } - /** * Parse command line options * @param args Parsed command line options @@ -293,16 +225,6 @@ public boolean init(String[] args) throws ParseException { } } - if (cliParser.hasOption("help")) { - printUsage(); - return false; - } - - if (cliParser.hasOption("debug")) { - debugFlag = true; - - } - appName = cliParser.getOptionValue("appname", "tensorflow"); amPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0")); amQueue = cliParser.getOptionValue("queue", "default"); @@ -310,7 +232,8 @@ public boolean init(String[] args) throws ParseException { amVCores = 
Integer.parseInt(cliParser.getOptionValue("master_vcores", "1")); tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT, TFClient.TF_CLIENT_PY); tfConatinerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, TFAmContainer.APPMASTER_JAR_PATH); - tfServerPy = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_PY, TFContainer.SERVER_PY_PATH); + workerNum = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_WORKER_NUM, "1")); + psNum = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_PS_NUM, "0")); if (amMemory < 0) { throw new IllegalArgumentException("Invalid memory specified for application master, exiting." @@ -338,21 +261,19 @@ public boolean init(String[] args) throws ParseException { containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "256")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue("container_vcores", "1")); - numContainers = Integer.parseInt(cliParser.getOptionValue("num_containers", "1")); - if (containerMemory < 0 || containerVirtualCores < 0 || numContainers < 1) { + if (containerMemory < 0 || containerVirtualCores < 0 || workerNum < 1 || psNum < 0) { throw new IllegalArgumentException("Invalid no. of containers or container memory/vcores specified," + " exiting." 
+ " Specified containerMemory=" + containerMemory + ", containerVirtualCores=" + containerVirtualCores - + ", numContainer=" + numContainers); + + ", workers=" + workerNum + + ", ps=" + psNum); } nodeLabelExpression = cliParser.getOptionValue("node_label_expression", null); - clientTimeout = Integer.parseInt(cliParser.getOptionValue("timeout", "600000")); - attemptFailuresValidityInterval = Long.parseLong(cliParser.getOptionValue( "attempt_failures_validity_interval", "-1")); @@ -377,20 +298,6 @@ public boolean init(String[] args) throws ParseException { + cliParser.getOptionValue("container_retry_interval")); } - if (cliParser.hasOption("flow_name")) { - flowName = cliParser.getOptionValue("flow_name"); - } - if (cliParser.hasOption("flow_version")) { - flowVersion = cliParser.getOptionValue("flow_version"); - } - if (cliParser.hasOption("flow_run_id")) { - try { - flowRunId = Long.parseLong(cliParser.getOptionValue("flow_run_id")); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "Flow run is not a valid long value", e); - } - } return true; } @@ -402,7 +309,6 @@ public boolean init(String[] args) throws ParseException { */ public boolean run() throws IOException, YarnException { - LOG.info("Running Client"); yarnClient.start(); YarnClusterMetrics clusterMetrics = yarnClient.getYarnClusterMetrics(); @@ -441,14 +347,10 @@ public boolean run() throws IOException, YarnException { YarnClientApplication app = yarnClient.createApplication(); GetNewApplicationResponse appResponse = app.getNewApplicationResponse(); // TODO get min/max resource capabilities from RM and change memory ask if needed - // If we do not have min/max, we may not be able to correctly request - // the required resources from the RM for the app master - // Memory ask has to be a multiple of min and less than max. 
- // Dump out information about cluster capability as seen by the resource manager + long maxMem = appResponse.getMaximumResourceCapability().getMemorySize(); LOG.info("Max mem capability of resources in this cluster " + maxMem); - // A resource ask cannot exceed the max. if (amMemory > maxMem) { LOG.info("AM memory specified above max threshold of cluster. Using max value." + ", specified=" + amMemory @@ -466,7 +368,6 @@ public boolean run() throws IOException, YarnException { amVCores = maxVCores; } - // set the application name ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext(); ApplicationId appId = appContext.getApplicationId(); @@ -478,15 +379,6 @@ public boolean run() throws IOException, YarnException { } Set tags = new HashSet(); - if (flowName != null) { - tags.add(TimelineUtils.generateFlowNameTag(flowName)); - } - if (flowVersion != null) { - tags.add(TimelineUtils.generateFlowVersionTag(flowVersion)); - } - if (flowRunId != 0) { - tags.add(TimelineUtils.generateFlowRunIdTag(flowRunId)); - } appContext.setApplicationTags(tags); // set local resources for the application master @@ -503,11 +395,6 @@ public boolean run() throws IOException, YarnException { tfAmContainer.addToLocalResources(fs, appMasterJar, TFAmContainer.APPMASTER_JAR_PATH, appId.toString(), localResources, null); - tfAmContainer.addToLocalResources(fs, tfServerPy, TFContainer.SERVER_PY_PATH, appId.toString(), - localResources, null); - tfAmContainer.addToLocalResources(fs, tfClientPy, TFClient.TF_CLIENT_PY, appId.toString(), - localResources, null); - // Set the log4j properties if needed if (!log4jPropFile.isEmpty()) { tfAmContainer.addToLocalResources(fs, log4jPropFile, log4jPath, appId.toString(), @@ -517,8 +404,6 @@ public boolean run() throws IOException, YarnException { // Set the necessary security tokens as needed //amContainer.setContainerTokens(containerToken); - // Set the env variables to be setup in the env where the application master will be run 
- LOG.info("Set the environment for the application master"); Map env = tfAmContainer.setJavaEnv(conf); // Set the necessary command to execute the application master @@ -527,10 +412,8 @@ public boolean run() throws IOException, YarnException { appContext.setNodeLabelExpression(nodeLabelExpression); } - - // Get final commmand StringBuilder command = tfAmContainer.makeCommands(amMemory, appMasterMainClass, containerMemory, containerVirtualCores, - numContainers, tfClientPy, tfConatinerJar, debugFlag, containerRetryOptions); + workerNum, psNum, tfConatinerJar, containerRetryOptions); LOG.info("Completed setting up app master command " + command.toString()); List commands = new ArrayList(); @@ -581,24 +464,12 @@ public boolean run() throws IOException, YarnException { Priority pri = Priority.newInstance(amPriority); appContext.setPriority(pri); - // Set the queue to which this application is to be submitted in the RM appContext.setQueue(amQueue); - // Submit the application to the applications manager - // SubmitApplicationResponse submitResp = applicationsManager.submitApplication(appRequest); - // Ignore the response as either a valid response object is returned on success - // or an exception thrown to denote some form of a failure LOG.info("Submitting application to ASM"); yarnClient.submitApplication(appContext); - - - // TODO - // Try submitting the same request again - // app submission failure? 
- - // Monitor the application return monitorApplication(appId); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java index 629006a89e305..d588056d1e2e1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java @@ -23,11 +23,24 @@ public class ClusterSpec { public static final String WORKER = "worker"; public static final String PS = "ps"; - public static ClusterSpec makeClusterSpec() { - return new ClusterSpec(); + private int numTotalWorkerServers = 0; + private int numTotalParameterServers = 0; + + public void setNumTotalWorkerServers(int numTotalWorkerServers) { + this.numTotalWorkerServers = numTotalWorkerServers; + } + + public void setNumTotalParameterServers(int numTotalParameterServers) { + this.numTotalParameterServers = numTotalParameterServers; } - private ClusterSpec() { + public static ClusterSpec makeClusterSpec(int workerServers, int psServers) { + return new ClusterSpec(workerServers, psServers); + } + + private ClusterSpec(int workerServers, int psServers) { + this.setNumTotalParameterServers(psServers); + this.setNumTotalWorkerServers(workerServers); workers = new HashMap<>(); paramServers = new HashMap<>(); serverPortNext = PORT_FLOOR + ((int)(Math.random() * (PORT_CEILING - PORT_FLOOR)) + 1); @@ -112,6 +125,11 @@ public TFServerAddress getServerAddress(String containerId) { return server; } + private boolean checkAllocationCompleted() { + return 
this.workers.size() == this.numTotalWorkerServers + && this.paramServers.size() == this.numTotalParameterServers; + } + @Override public String toString() { String worker_array = ""; @@ -135,7 +153,10 @@ public String toString() { } - public String getJsonString() throws JsonProcessingException { + public String getJsonString() throws JsonProcessingException, ClusterSpecException { + if (!checkAllocationCompleted()) { + throw new ClusterSpecException("not allocation completed"); + } Map> cluster = new HashMap<>(); if (!this.workers.isEmpty()) { @@ -161,7 +182,7 @@ public String getJsonString() throws JsonProcessingException { return json; } - public String getBase64EncodedJsonString() throws JsonProcessingException { + public String getBase64EncodedJsonString() throws JsonProcessingException, ClusterSpecException { byte[] data = getJsonString().getBytes(); Base64 encoder = new Base64(0, null, true); return encoder.encodeToString(data); @@ -188,11 +209,15 @@ public void testClusterString() { LOG.info("clusterspec JsonString: " + this.getJsonString()); } catch (JsonProcessingException e) { e.printStackTrace(); + } catch (ClusterSpecException e) { + e.printStackTrace(); } try { LOG.info("clusterspec encodeJsonString: " + this.getBase64EncodedJsonString()); } catch (JsonProcessingException e) { e.printStackTrace(); + } catch (ClusterSpecException e) { + e.printStackTrace(); } String base64DecodedString = null; try { @@ -203,9 +228,10 @@ public void testClusterString() { } } catch (JsonProcessingException e) { e.printStackTrace(); + } catch (ClusterSpecException e) { + e.printStackTrace(); } - try { Map> cs = ClusterSpec.toClusterMap(base64DecodedString); if (cs.containsKey(WORKER)) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java new file mode 100644 index 0000000000000..5dbe5670ddf14 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java @@ -0,0 +1,27 @@ +package org.apache.hadoop.yarn.applications.tensorflow; + +/** + * Created by muzhongz on 16-12-27. + */ +public class ClusterSpecException extends Exception { + + public ClusterSpecException() { + super(); + } + + public ClusterSpecException(String message) { + super(message); + } + + public ClusterSpecException(String message, Throwable cause) { + super(message, cause); + } + + public ClusterSpecException(Throwable cause) { + super(cause); + } + + public ClusterSpecException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java index 1b7f48983e47a..7e948cebf11ce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -18,7 +18,6 @@ public class LaunchContainerThread implements Runnable { // Allocated container private 
Container container; - private String shellId; private String tfServerJar; // Memory to request for the container on which the shell command will run private long containerMemory = 10; @@ -35,12 +34,11 @@ public class LaunchContainerThread implements Runnable { private ApplicationMaster.NMCallbackHandler containerListener; private TFServerAddress serverAddress = null; - private LaunchContainerThread(Container container, String shellId, String tfServerJar, long containerMemory, + private LaunchContainerThread(Container container, String tfServerJar, long containerMemory, ContainerRetryPolicy containerRetryPolicy, Set containerRetryErrorCodes, int containerMaxRetries, int containrRetryInterval, ApplicationMaster appMaster, ApplicationMaster.NMCallbackHandler containerListener) { this.container = container; - this.shellId = shellId; this.tfServerJar = tfServerJar; this.containerMemory = containerMemory; this.containerRetryPolicy = containerRetryPolicy; @@ -51,12 +49,11 @@ private LaunchContainerThread(Container container, String shellId, String tfServ this.containerListener = containerListener; } - public LaunchContainerThread(Container container, String shellId, String tfServerJar, long containerMemory, + public LaunchContainerThread(Container container, String tfServerJar, long containerMemory, ContainerRetryPolicy containerRetryPolicy, Set containerRetryErrorCodes, int containerMaxRetries, int containrRetryInterval, ApplicationMaster appMaster, ApplicationMaster.NMCallbackHandler containerListener, TFServerAddress serverAddress) { this(container, - shellId, tfServerJar, containerMemory, containerRetryPolicy, @@ -79,10 +76,7 @@ public LaunchContainerThread(Container container, String shellId, String tfServe */ public void run() { LOG.info("Setting up container launch container for containerid=" - + container.getId() + " with shellid=" + shellId); - - // Set the env variables to be setup in the env where the tf servers will be run - LOG.info("Set the environment for 
the tf servers"); + + container.getId()); FileSystem fs = null; try { @@ -100,13 +94,6 @@ public void run() { Map localResources = new HashMap(); ApplicationId appId = appMaster.getAppAttempId().getApplicationId(); - String tfServerPy = TFContainer.SERVER_PY_PATH; - try { - tfContainer.addToLocalResources(fs, tfServerPy, TFContainer.SERVER_PY_PATH, appId.toString(), - localResources, null); - } catch (IOException e) { - e.printStackTrace(); - } try { tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), @@ -129,6 +116,8 @@ public void run() { } catch (JsonProcessingException e) { LOG.info("cluster spec cannot convert into base64 json string!"); e.printStackTrace(); + } catch (ClusterSpecException e) { + e.printStackTrace(); } List commands = new ArrayList(); @@ -138,16 +127,6 @@ public void run() { LOG.info(serverAddress.getJobName() + " : " + serverAddress.getAddress() + ":" + serverAddress.getPort()); } - // Set up ContainerLaunchContext, setting local resource, environment, - // command and token for constructor. - - // Note for tokens: Set up tokens for the container too. Today, for normal - // shell commands, the container in distribute-shell doesn't need any - // tokens. We are populating them mainly for NodeManagers to be able to - // download anyfiles in the distributed file-system. The tokens are - // otherwise also useful in cases, for e.g., when one is running a - // "hadoop dfs" command inside the distributed shell. 
- ContainerRetryContext containerRetryContext = ContainerRetryContext.newInstance( containerRetryPolicy, containerRetryErrorCodes, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java index 0de90e79b9bef..a03a12141c3bd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java @@ -94,7 +94,7 @@ public Map setJavaEnv(Configuration conf) { public StringBuilder makeCommands(long amMemory, String appMasterMainClass, int containerMemory, int containerVirtualCores, - int numContainers, String tfClientPy, String tfConatinerJar, boolean debugFlag, Vector containerRetryOptions) { + int workerNumContainers, int psNumContainers, String tfConatinerJar, Vector containerRetryOptions) { // Set the necessary command to execute the application master Vector vargs = new Vector(30); @@ -108,12 +108,9 @@ public StringBuilder makeCommands(long amMemory, String appMasterMainClass, int // Set params for Application Master vargs.add("--container_memory " + String.valueOf(containerMemory)); vargs.add("--container_vcores " + String.valueOf(containerVirtualCores)); - vargs.add("--num_containers " + String.valueOf(numContainers)); - vargs.add("--" + TFApplication.OPT_TF_CLIENT + " " + String.valueOf(tfClientPy)); + vargs.add(TFApplication.makeOption(TFApplication.OPT_TF_WORKER_NUM, String.valueOf(workerNumContainers))); + vargs.add(TFApplication.makeOption(TFApplication.OPT_TF_PS_NUM, 
String.valueOf(psNumContainers))); vargs.add("--" + TFApplication.OPT_TF_SERVER_JAR + " " + String.valueOf(tfConatinerJar)); - if (debugFlag) { - vargs.add("--debug"); - } vargs.addAll(containerRetryOptions); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java index 689cee7c30ab1..11982deba2fc3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -4,21 +4,21 @@ * Created by muzhongz on 16-12-16. 
*/ public class TFApplication { + + public static final String OPT_TF_APPNAME = "appname"; + public static final String OPT_TF_PRIORITY = "priority"; + public static final String OPT_TF_QUEUE = "queue"; + public static final String OPT_TF_CLIENT = "tf_client"; public static final String OPT_TF_CLIENT_PY = "tf_clientpy"; public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; public static final String OPT_TF_SERVER_PY = "tf_serverpy"; + public static final String OPT_TF_WORKER_NUM = "num_worker"; + public static final String OPT_TF_PS_NUM = "num_ps"; public static String makeOption(String opt, String val) { return "--" + opt + " " + val; } - public static TFApplicationRpc getRpc() { - return null; - } - - public static boolean startRpcService() { - return true; - } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java index ea027668b2c97..6a1bae1a6e98c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java @@ -4,7 +4,6 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.util.ThreadUtil; import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowCluster; -import org.apache.hadoop.yarn.applications.tensorflow.api.TensorflowClusterImpl; import org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecRequest; import 
org.apache.hadoop.yarn.applications.tensorflow.api.protocolrecords.GetClusterSpecResponse; import org.apache.hadoop.yarn.conf.YarnConfiguration; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index f9ec4b3ee3d39..771eeef73f526 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -32,7 +32,6 @@ public class TFContainer { public TFContainer(ApplicationMaster am) { appMaster = am; - appName = appMaster.getAppName(); } public void addToLocalResources(FileSystem fs, String fileSrcPath, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java index 1de1ee2c2f655..eda1525daed56 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFYarnConstants.java @@ -28,28 +28,6 @@ @InterfaceStability.Unstable public class TFYarnConstants { - /** - * Environment key name pointing to 
the shell script's location - */ - public static final String DISTRIBUTEDSHELLSCRIPTLOCATION = "DISTRIBUTEDSHELLSCRIPTLOCATION"; - - /** - * Environment key name denoting the file timestamp for the shell script. - * Used to validate the local resource. - */ - public static final String DISTRIBUTEDSHELLSCRIPTTIMESTAMP = "DISTRIBUTEDSHELLSCRIPTTIMESTAMP"; - - /** - * Environment key name denoting the file content length for the shell script. - * Used to validate the local resource. - */ - public static final String DISTRIBUTEDSHELLSCRIPTLEN = "DISTRIBUTEDSHELLSCRIPTLEN"; - - /** - * Environment key name denoting the timeline domain ID. - */ - public static final String DISTRIBUTEDSHELLTIMELINEDOMAIN = "DISTRIBUTEDSHELLTIMELINEDOMAIN"; - public static final String APP_NAME = "tenseoflow"; public static final int INVALID_TCP_PORT = -1; From 00599d07a71c301188a9c2cbea1649a73c0336d7 Mon Sep 17 00:00:00 2001 From: muzhongz Date: Thu, 29 Dec 2016 11:43:26 +0800 Subject: [PATCH 15/32] PM: add license transfer AppMaster.jar to TFServer.jar --- .../tensorflow/ApplicationMaster.java | 32 +++---- .../yarn/applications/tensorflow/Client.java | 12 --- .../applications/tensorflow/ClusterSpec.java | 18 ++++ .../tensorflow/ClusterSpecException.java | 17 ++++ .../tensorflow/LaunchContainerThread.java | 86 ++++++++++++++----- .../tensorflow/TFAmContainer.java | 18 ++++ .../tensorflow/TFApplication.java | 18 ++++ .../tensorflow/TFApplicationRpc.java | 18 ++++ .../tensorflow/TFApplicationRpcClient.java | 18 ++++ .../tensorflow/TFApplicationRpcServer.java | 18 ++++ .../applications/tensorflow/TFClient.java | 25 ++++-- .../applications/tensorflow/TFContainer.java | 34 ++++++-- .../tensorflow/TFMasterAddress.java | 18 ++++ .../tensorflow/TFParamServerAddress.java | 18 ++++ .../applications/tensorflow/TFServer.java | 18 ++++ .../tensorflow/TFServerAddress.java | 18 ++++ .../tensorflow/TFWorkerAddress.java | 18 ++++ 17 files changed, 342 insertions(+), 62 deletions(-) diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 7d0cae8e23804..e63e4427c09e1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -376,9 +376,7 @@ public boolean init(String[] args) throws ParseException, IOException { containrRetryInterval = Integer.parseInt(cliParser.getOptionValue( "container_retry_interval", "0")); - if (cliParser.hasOption(TFApplication.OPT_TF_SERVER_JAR)) { - tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR); - } + tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, TFAmContainer.APPMASTER_JAR_PATH); clusterSpec = ClusterSpec.makeClusterSpec(numTotalWokerContainers, numTotalParamServerContainer); @@ -619,16 +617,15 @@ public boolean startAllContainers() throws Exception { // +allocatedContainer.getContainerToken().getIdentifier().toString()); LOG.info("server cid: " + allocatedContainer.getId().toString()); - LaunchContainerThread runnable = new LaunchContainerThread(allocatedContainer, - tfServerJar, - containerMemory, - containerRetryPolicy, - containerRetryErrorCodes, - containerMaxRetries, - containrRetryInterval, - this, - containerListener, clusterSpec.getServerAddress(allocatedContainer.getId().toString())); - Thread launchThread = new Thread(runnable); + LaunchContainerThread launchDelegator = new LaunchContainerThread(allocatedContainer, + this, 
clusterSpec.getServerAddress(allocatedContainer.getId().toString())); + launchDelegator.setTfServerJar(tfServerJar); + launchDelegator.setContainerMemory(containerMemory); + launchDelegator.setContainerRetryPolicy(containerRetryPolicy); + launchDelegator.setContainerRetryErrorCodes(containerRetryErrorCodes); + launchDelegator.setContainerMaxRetries(containerMaxRetries); + launchDelegator.setContainrRetryInterval(containrRetryInterval); + Thread launchThread = new Thread(launchDelegator); // launch and start the container on a separate thread to keep // the main thread unblocked @@ -636,12 +633,11 @@ public boolean startAllContainers() throws Exception { launchThreads.add(launchThread); launchedContainers.add(allocatedContainer.getId()); launchThread.start(); - return true; } } else { throw new Exception("containers are not allocated!"); } - return false; + return true; } @VisibleForTesting @@ -754,6 +750,12 @@ public void onError(Throwable e) { } } + public void addContainer(Container container) { + if (containerListener != null && container != null) { + containerListener.addContainer(container.getId(), container); + } + } + @VisibleForTesting static class NMCallbackHandler extends NMClientAsync.AbstractCallbackHandler { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 069e7e50b1ee0..cc5c457ea0b4f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -474,18 +474,6 @@ 
public boolean run() throws IOException, YarnException { } - private void tensorflowServersReady() { - while (true) { - - - try { - Thread.sleep(100); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - private boolean isEmptyString(String s) { return s == null || s.equals(""); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java index d588056d1e2e1..096c1a2801323 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; import com.fasterxml.jackson.core.JsonProcessingException; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java index 5dbe5670ddf14..8a80e248c9264 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpecException.java @@ -1,3 +1,20 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ package org.apache.hadoop.yarn.applications.tensorflow; /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java index 7e948cebf11ce..3a3429b0f8bb1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; import com.fasterxml.jackson.core.JsonProcessingException; @@ -31,37 +49,63 @@ public class LaunchContainerThread implements Runnable { private ApplicationMaster appMaster; - private ApplicationMaster.NMCallbackHandler containerListener; private TFServerAddress serverAddress = null; - private LaunchContainerThread(Container container, String tfServerJar, long containerMemory, - ContainerRetryPolicy containerRetryPolicy, Set containerRetryErrorCodes, - int containerMaxRetries, int containrRetryInterval, ApplicationMaster appMaster, - ApplicationMaster.NMCallbackHandler containerListener) { - this.container = container; + public String getTfServerJar() { + return tfServerJar; + } + + public void setTfServerJar(String tfServerJar) { this.tfServerJar = tfServerJar; + } + + public long getContainerMemory() { + return containerMemory; + } + + public void setContainerMemory(long containerMemory) { this.containerMemory = containerMemory; + } + + public ContainerRetryPolicy getContainerRetryPolicy() { + return containerRetryPolicy; + } + + public void setContainerRetryPolicy(ContainerRetryPolicy containerRetryPolicy) { this.containerRetryPolicy = containerRetryPolicy; + } + + public Set getContainerRetryErrorCodes() { + return containerRetryErrorCodes; + } + + public void setContainerRetryErrorCodes(Set containerRetryErrorCodes) { this.containerRetryErrorCodes = containerRetryErrorCodes; + } + + public int getContainerMaxRetries() { + return containerMaxRetries; + } + + public void setContainerMaxRetries(int containerMaxRetries) { this.containerMaxRetries = containerMaxRetries; + } + + public int getContainrRetryInterval() { + return containrRetryInterval; + } + + public void setContainrRetryInterval(int containrRetryInterval) { this.containrRetryInterval = containrRetryInterval; + } + + private LaunchContainerThread(Container container, ApplicationMaster appMaster) { + this.container = container; this.appMaster 
= appMaster; - this.containerListener = containerListener; } - public LaunchContainerThread(Container container, String tfServerJar, long containerMemory, - ContainerRetryPolicy containerRetryPolicy, Set containerRetryErrorCodes, - int containerMaxRetries, int containrRetryInterval, ApplicationMaster appMaster, - ApplicationMaster.NMCallbackHandler containerListener, TFServerAddress serverAddress) { - this(container, - tfServerJar, - containerMemory, - containerRetryPolicy, - containerRetryErrorCodes, - containerMaxRetries, - containrRetryInterval, - appMaster, - containerListener); + public LaunchContainerThread(Container container, ApplicationMaster appMaster, TFServerAddress serverAddress) { + this(container, appMaster); this.serverAddress = serverAddress; if (this.serverAddress == null) { LOG.info("server address is null"); @@ -137,7 +181,7 @@ public void run() { ContainerLaunchContext ctx = ContainerLaunchContext.newInstance( localResources, env, commands, null, appMaster.getAllTokens().duplicate(), null, containerRetryContext); - containerListener.addContainer(container.getId(), container); + appMaster.addContainer(container); appMaster.getNMClientAsync().startContainerAsync(container, ctx); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java index a03a12141c3bd..ee8ea58a33e60 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java @@ -1,3 +1,21 @@ 
+/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.commons.io.IOUtils; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java index 11982deba2fc3..b23cd5d11ffdf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java index 9a39fc580e467..0f199f6708700 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpc.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.hadoop.yarn.exceptions.YarnException; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java index df21577471026..8471fad35d78e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcClient.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java index 6a1bae1a6e98c..7cca81ae71374 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplicationRpcServer.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java index 73c511c156a6b..b3cf2044a8841 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.commons.logging.Log; @@ -113,11 +131,4 @@ public void run() { } - public void tensorflowServersReady(boolean ready, String masterAddress, int port) { - LOG.info("tf master : " + masterAddress + ":" + port); - - this.tfMasterAddress = masterAddress; - this.tfMasterPort = port; - startTensorflowClient(); - } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index 771eeef73f526..41dbfb726f976 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.yarn.applications.tensorflow; import org.apache.commons.io.IOUtils; @@ -23,7 +41,7 @@ * Created by muzhongz on 16-12-14. */ public class TFContainer { - private static final Log LOG = LogFactory.getLog(TFContainer.class); + private static final Log LOG = LogFactory.getLog(TFContainer.class); private String appName = TFYarnConstants.APP_NAME; private ApplicationMaster appMaster; @@ -35,12 +53,10 @@ public TFContainer(ApplicationMaster am) { } public void addToLocalResources(FileSystem fs, String fileSrcPath, - String fileDstPath, String appId, Map localResources, - String resources) throws IOException { - String suffix = - appName + "/" + appId + "/" + fileDstPath; - Path dst = - new Path(fs.getHomeDirectory(), suffix); + String fileDstPath, String appId, Map localResources, + String resources) throws IOException { + String suffix = appName + "/" + appId + "/" + fileDstPath; + Path dst = new Path(fs.getHomeDirectory(), suffix); if (fileSrcPath == null) { FSDataOutputStream ostream = null; try { @@ -66,8 +82,12 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, public Map setJavaEnv(Configuration conf, String tfServerJar) { // Set the java environment Map env = new HashMap(); + + // Add TFServer.jar location to classpath StringBuilder classPathEnv = new StringBuilder(ApplicationConstants.Environment.CLASSPATH.$$()) .append(ApplicationConstants.CLASS_PATH_SEPARATOR).append("./*"); + + // Add hadoop's jar location to classpath for (String c : conf.getStrings( YarnConfiguration.YARN_APPLICATION_CLASSPATH, YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java index 9f07f0569af1b..ae783969947e2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFMasterAddress.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java index d979d451b352f..c682c23251f78 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFParamServerAddress.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java index d34aa507e4085..63343d30692b8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java index 9c140f9bd10e3..9f29318d7a499 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServerAddress.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java index 425017159915f..29255b3f66ff7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFWorkerAddress.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.yarn.applications.tensorflow; /** From d5042f632fd62a03009062fd341a66242cef9f24 Mon Sep 17 00:00:00 2001 From: muzhongz Date: Thu, 29 Dec 2016 15:51:37 +0800 Subject: [PATCH 16/32] CLIENT: support minist.py --- .../tensorflow/ApplicationMaster.java | 16 +++-- .../yarn/applications/tensorflow/Client.java | 12 ++-- .../applications/tensorflow/ClusterSpec.java | 4 +- .../applications/tensorflow/TFClient.java | 62 ++++++++++++++++++- .../applications/tensorflow/TFServer.java | 3 +- 5 files changed, 80 insertions(+), 17 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index e63e4427c09e1..e9d6900a0dd5f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -396,13 +396,19 @@ private final class RpcForClient implements TFApplicationRpc { @Override public String getClusterSpec() throws IOException, YarnException { - if (clusterSpec != null && clusterSpec.getMasterNode() != null) { - TFServerAddress server = clusterSpec.getMasterNode(); - String master = server.getAddress() + ":" + server.getPort(); - return master; + String cs = ""; + if (clusterSpec != null) { + + try { + cs = clusterSpec.getJsonString(); + } catch (ClusterSpecException e) { + cs = ""; + LOG.info("Cluster spec is not prepared yet when getting cluster spec!"); + e.printStackTrace(); + } } - return ""; + return cs; } } diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index cc5c457ea0b4f..89347d5afe4ee 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -95,6 +95,8 @@ public String getAppName() { private String masterAddress; + private String clusterSpecJsonString = null; + // Command line options private Options opts; @@ -524,12 +526,12 @@ private boolean monitorApplication(ApplicationId appId) appRpc = new TFApplicationRpcClient(hostname, port).getRpc(); } - if (appRpc != null && isEmptyString(masterAddress)) { - masterAddress = appRpc.getClusterSpec(); - LOG.info("master node address is " + masterAddress); - if (!isEmptyString(masterAddress)) { + if (appRpc != null && isEmptyString(clusterSpecJsonString)) { + clusterSpecJsonString = appRpc.getClusterSpec(); + LOG.info("cluster spec is " + clusterSpecJsonString); + if (!isEmptyString(clusterSpecJsonString)) { TFClient tfClient = new TFClient(tfClientPy); - tfClient.startTensorflowClient(masterAddress); + tfClient.startTensorflowClient(clusterSpecJsonString); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java index 096c1a2801323..002981fc69e1b 
100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ClusterSpec.java @@ -213,7 +213,7 @@ public static String decodeJsonString(String base64String) { } - public static Map> toClusterMap(String clusterString) throws IOException { + public static Map> toClusterMapFromJsonString(String clusterString) throws IOException { ObjectMapper objectMapper = new ObjectMapper(); Map> cluster = null; cluster = objectMapper.readValue(clusterString, Map.class); @@ -251,7 +251,7 @@ public void testClusterString() { } try { - Map> cs = ClusterSpec.toClusterMap(base64DecodedString); + Map> cs = ClusterSpec.toClusterMapFromJsonString(base64DecodedString); if (cs.containsKey(WORKER)) { for (String s : cs.get(WORKER)) { LOG.info(s); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java index b3cf2044a8841..f1c7fff1d3799 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -24,6 +24,9 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * Created by muzhongz on 16-11-30. 
@@ -40,7 +43,11 @@ public class TFClient implements Runnable { private static final String OPT_MASTER_PORT = "mp"; + private static final String TF_PY_OPT_WORKERS = "wk"; + private static final String TF_PY_OPT_PSES = "ps"; + private String workers = null; + private String pses = null; private void execCmd(String cmd) { Process process = null; @@ -115,14 +122,63 @@ public void startTensorflowClient() { thread.start(); } - public void startTensorflowClient(String masterNode) { - if (masterNode == null || masterNode.equals("")) { + public void startTensorflowClient(String clusterSpecJsonString) { + if (clusterSpecJsonString == null || clusterSpecJsonString.equals("")) { return; } + + Map> clusterSpec = null; + + try { + clusterSpec = ClusterSpec.toClusterMapFromJsonString(clusterSpecJsonString); + } catch (IOException e) { + LOG.error("cluster spec is invalid!"); + e.printStackTrace(); + return; + } + + List workerArray = clusterSpec.get(ClusterSpec.WORKER); + if (workerArray != null) { + Iterator it = workerArray.iterator(); + String w = it.next(); + if (w != null) { + workers = w; + } + + while (it.hasNext()) { + workers += "," + it.next(); + } + } + + List psArray = clusterSpec.get(ClusterSpec.PS); + if (psArray != null) { + Iterator it = psArray.iterator(); + String p = it.next(); + if (p != null) { + pses = p; + } + + while (it.hasNext()) { + pses += "," + it.next(); + } + } + + LOG.info("workers: <" + workers + ">;" + "pses: <" + pses + ">"); + Thread thread = new Thread(new Runnable() { @Override public void run() { - String cmd = "python " + tfClientPy + " " + masterNode; + + String cmd = "python " + tfClientPy; + + if (workers != null) { + cmd += " " + TFApplication.makeOption(TF_PY_OPT_WORKERS, "\"" + workers + "\""); + } + + if (pses != null) { + cmd += " " + TFApplication.makeOption(TF_PY_OPT_PSES, "\"" + pses + "\""); + } + LOG.info("TF client command is [" + cmd + "]"); execCmd(cmd); } diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java index 63343d30692b8..9f69eedeb2b0b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -25,7 +25,6 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.yarn.conf.YarnConfiguration; import java.io.BufferedReader; import java.io.IOException; @@ -96,7 +95,7 @@ public boolean init(String[] args) throws ParseException, IOException { jobName = cliParser.getOptionValue(OPT_JN); taskIndex = Integer.parseInt(cliParser.getOptionValue(OPT_TI)); LOG.info("cs: " + clusterSpecString + "; + jn: " + jobName + "; ti: " + taskIndex); - cluster = ClusterSpec.toClusterMap(clusterSpecString); + cluster = ClusterSpec.toClusterMapFromJsonString(clusterSpecString); return true; } From 58385ea0b8fd1d87432bfb8a99aabb30bd08de38 Mon Sep 17 00:00:00 2001 From: muzhongz Date: Thu, 29 Dec 2016 16:48:54 +0800 Subject: [PATCH 17/32] RES: fix colliction in TF worker when copying resource from hdfs --- .../tensorflow/ApplicationMaster.java | 20 +++++++- .../tensorflow/LaunchContainerThread.java | 16 +++++- .../applications/tensorflow/TFContainer.java | 50 ++++++++++++++++++- .../applications/tensorflow/TFServer.java | 1 + 4 files changed, 83 insertions(+), 4 deletions(-) diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index e9d6900a0dd5f..178f228699520 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -47,6 +47,10 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; @@ -404,7 +408,7 @@ public String getClusterSpec() throws IOException, YarnException { } catch (ClusterSpecException e) { cs = ""; LOG.info("Cluster spec is not prepared yet when getting cluster spec!"); - e.printStackTrace(); + //e.printStackTrace(); } } @@ -608,6 +612,19 @@ public boolean startAllContainers() throws Exception { } + FileSystem fs = null; + try { + fs = FileSystem.get(this.getConfiguration()); + } catch (IOException e) { + e.printStackTrace(); + } + + String suffix = TFYarnConstants.APP_NAME + "/" + getAppAttempId().getApplicationId() + "/" + TFContainer.SERVER_JAR_PATH; + Path dst = new Path(fs.getHomeDirectory(), suffix); + if (tfServerJar != null) { + fs.copyFromLocalFile(new Path(tfServerJar), dst); + } + for 
(Container allocatedContainer : this.allocatedContainers) { LOG.info("Launching a new container." @@ -625,6 +642,7 @@ public boolean startAllContainers() throws Exception { LOG.info("server cid: " + allocatedContainer.getId().toString()); LaunchContainerThread launchDelegator = new LaunchContainerThread(allocatedContainer, this, clusterSpec.getServerAddress(allocatedContainer.getId().toString())); + launchDelegator.setDst(dst); launchDelegator.setTfServerJar(tfServerJar); launchDelegator.setContainerMemory(containerMemory); launchDelegator.setContainerRetryPolicy(containerRetryPolicy); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java index 3a3429b0f8bb1..fef97cb7ce17d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -22,6 +22,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.*; import java.io.IOException; @@ -112,6 +113,16 @@ public LaunchContainerThread(Container container, ApplicationMaster appMaster, T } } + private Path dst; + + public Path getDst() { + return dst; + } + + public void setDst(Path dst) { + this.dst = dst; + } + @Override /** * Connects to CM, sets up container launch context @@ -140,8 +151,9 @@ public void run() { ApplicationId appId = 
appMaster.getAppAttempId().getApplicationId(); try { - tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), - localResources, null); + tfContainer.addToLocalResources(fs, dst, TFContainer.SERVER_JAR_PATH, localResources); +/* tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), + localResources, null);*/ } catch (IOException e) { e.printStackTrace(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index 41dbfb726f976..5d9090f772b19 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -34,7 +34,9 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStreamReader; import java.util.*; /** @@ -52,11 +54,57 @@ public TFContainer(ApplicationMaster am) { appMaster = am; } + private void execCmd(String cmd) { + Process process = null; + try { + LOG.info("cmd is " + cmd); + process = Runtime.getRuntime().exec(cmd); + } catch (IOException e) { + LOG.fatal("cmd running failed", e); + e.printStackTrace(); + } + + try { + LOG.info("cmd log--->"); + BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream())); + String line; + while ((line = in.readLine()) != null) { + + LOG.info(line); + System.out.println(line); + } + in.close(); + 
LOG.info("<---cmd log end"); + process.waitFor(); + } catch (InterruptedException e) { + LOG.fatal("waiting error ", e); + e.printStackTrace(); + } catch (IOException e) { + LOG.info("io exception"); + e.printStackTrace(); + } + } + + public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath, Map localResources) throws IOException { + FileStatus scFileStatus = fs.getFileStatus(dst); + LocalResource scRsrc = + LocalResource.newInstance( + URL.fromURI(dst.toUri()), + LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, + scFileStatus.getLen(), scFileStatus.getModificationTime()); + localResources.put(fileDstPath, scRsrc); + } + + public void addToLocalResources(FileSystem fs, String fileSrcPath, String fileDstPath, String appId, Map localResources, String resources) throws IOException { + + execCmd("pwd"); + execCmd("ls -l"); String suffix = appName + "/" + appId + "/" + fileDstPath; Path dst = new Path(fs.getHomeDirectory(), suffix); + LOG.info("copy: " + fileSrcPath + " ===> " + dst.toString()); if (fileSrcPath == null) { FSDataOutputStream ostream = null; try { @@ -69,7 +117,7 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, } else { fs.copyFromLocalFile(new Path(fileSrcPath), dst); } - LOG.info("copy: " + fileSrcPath + " ===> " + dst.toString()); + FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java index 9f69eedeb2b0b..0aae4ae252d35 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -144,6 +144,7 @@ public void run() { private int port; public void startTFServer() { + execCmd("ls -l"); /* org.tensorflow.bridge.TFServer server = new org.tensorflow.bridge.TFServer(cluster, jobName, taskIndex); server.start();*/ } From bab3945672345008eee230522471575ff73e3a6c Mon Sep 17 00:00:00 2001 From: muzhongz Date: Thu, 29 Dec 2016 16:53:49 +0800 Subject: [PATCH 18/32] SCRIPT: add yarn-tf for running minist.py --- .../bin/yarn-tf | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100755 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf new file mode 100755 index 0000000000000..8d5d7e0d3fce0 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf @@ -0,0 +1,44 @@ +#!/bin/bash + +JOB="" +WORKERS=0 +PSES=0 +JAR="" +while true +do + case "$1" in + --job) + JOB="$2" + echo "job script: $JOB" + shift + ;; + --wn) + WORKERS="$2" + echo "worker num: $WORKERS" + shift + ;; + --pn) + PSES="$2" + echo "ps num: $PSES" + shift + ;; + --jar) + JAR="$2" + echo "jar path: $JAR" + shift + ;; + *) + shift + break + ;; + esac +shift +done + +CLIENT_MAIN_CLASS := "org.apache.hadoop.yarn.applications.tensorflow.Client" + +hadoop jar $JAR $CLIENT_MAIN_CLASS \ + --jar $JAR \ + --tf_client $JOB \ + --num_worker $WORKERS \ + --num_ps $PSES From 4a8f57e92f274c9abb2acbfa42b483e1561a6caf Mon Sep 17 00:00:00 2001 From: muzhongz Date: Fri, 30 Dec 2016 15:26:25 +0800 Subject: [PATCH 19/32] MEM: change jvm memory size to 4096 --- 
.../yarn/applications/tensorflow/ApplicationMaster.java | 4 ++-- .../apache/hadoop/yarn/applications/tensorflow/Client.java | 2 +- .../hadoop/yarn/applications/tensorflow/TFContainer.java | 1 + .../apache/hadoop/yarn/applications/tensorflow/TFServer.java | 5 +++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 178f228699520..41f9d31f36c14 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -341,8 +341,8 @@ public boolean init(String[] args) throws ParseException, IOException { + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + appAttemptID.getAttemptId()); - containerMemory = Integer.parseInt(cliParser.getOptionValue( - "container_memory", "10")); + containerMemory = 4096; + //Integer.parseInt(cliParser.getOptionValue("container_memory", "2048")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue( "container_vcores", "1")); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 89347d5afe4ee..60f5698459527 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -261,7 +261,7 @@ public boolean init(String[] args) throws ParseException { tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT); } - containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "256")); + containerMemory = 4096;//Integer.parseInt(cliParser.getOptionValue("container_memory", "4096")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue("container_vcores", "1")); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index 5d9090f772b19..6063a9cd521fd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -162,6 +162,7 @@ public StringBuilder makeCommands(long containerMemory, String clusterSpec, Stri // Set the necessary command to execute on the allocated container Vector vargs = new Vector(5); vargs.add(ApplicationConstants.Environment.JAVA_HOME.$$() + "/bin/java"); + //vargs.add("-Xmx" + containerMemory + "m"); vargs.add("-Xmx" + containerMemory + "m"); String containerClassName = TFServer.class.getName(); vargs.add(containerClassName); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java index 0aae4ae252d35..d733f972e5bd8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -145,7 +145,8 @@ public void run() { public void startTFServer() { execCmd("ls -l"); -/* org.tensorflow.bridge.TFServer server = new org.tensorflow.bridge.TFServer(cluster, jobName, taskIndex); - server.start();*/ + org.tensorflow.bridge.TFServer server = new org.tensorflow.bridge.TFServer(cluster, jobName, taskIndex); + server.start(); + server.join(); } } From 4602f000a9981d5f0e17f0ea0d3f8025a98fbb85 Mon Sep 17 00:00:00 2001 From: muzhongz Date: Thu, 12 Jan 2017 15:11:24 +0800 Subject: [PATCH 20/32] RES: copy the files to hdfs once on Client endpoint --- .../tensorflow/ApplicationMaster.java | 11 +-- .../yarn/applications/tensorflow/Client.java | 86 ++++++++++++------- .../tensorflow/LaunchContainerThread.java | 2 +- .../tensorflow/TFAmContainer.java | 14 ++- .../tensorflow/TFApplication.java | 3 +- .../applications/tensorflow/TFContainer.java | 6 ++ 6 files changed, 79 insertions(+), 43 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 41f9d31f36c14..96972c62f49a6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -341,8 +341,7 @@ public boolean init(String[] args) throws ParseException, IOException { + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + appAttemptID.getAttemptId()); - containerMemory = 4096; - //Integer.parseInt(cliParser.getOptionValue("container_memory", "2048")); + containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "256")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue( "container_vcores", "1")); @@ -612,7 +611,7 @@ public boolean startAllContainers() throws Exception { } - FileSystem fs = null; +/* FileSystem fs = null; try { fs = FileSystem.get(this.getConfiguration()); } catch (IOException e) { @@ -623,7 +622,9 @@ public boolean startAllContainers() throws Exception { Path dst = new Path(fs.getHomeDirectory(), suffix); if (tfServerJar != null) { fs.copyFromLocalFile(new Path(tfServerJar), dst); - } + }*/ + + for (Container allocatedContainer : this.allocatedContainers) { @@ -642,7 +643,7 @@ public boolean startAllContainers() throws Exception { LOG.info("server cid: " + allocatedContainer.getId().toString()); LaunchContainerThread launchDelegator = new LaunchContainerThread(allocatedContainer, this, clusterSpec.getServerAddress(allocatedContainer.getId().toString())); - launchDelegator.setDst(dst); + //launchDelegator.setDst(dst); launchDelegator.setTfServerJar(tfServerJar); 
launchDelegator.setContainerMemory(containerMemory); launchDelegator.setContainerRetryPolicy(containerRetryPolicy); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 60f5698459527..eff79d44ba8dc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -25,20 +25,20 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.api.ApplicationClientProtocol; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; -import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.records.*; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.YarnClientApplication; import org.apache.hadoop.yarn.client.util.YarnClientUtils; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import sun.misc.Signal; +import sun.misc.SignalHandler; import java.io.IOException; import java.nio.ByteBuffer; @@ -67,20 +67,15 @@ public String 
getAppName() { // Amt. of virtual core resource to request for to run the App Master private int amVCores = 1; - // Application master jar file private String appMasterJar = ""; - private String tfConatinerJar = ""; - - private String tfServerPy = ""; - // Main class to invoke application master private final String appMasterMainClass; private String tfClientPy; - // Amt of memory to request for container in which shell script will be executed + // Amt of memory to request for container where tensorflow server will run private int containerMemory = 10; - // Amt. of virtual cores to request for container in which shell script will be executed + // Amt. of virtual cores to request for container where tensorflow server will run private int containerVirtualCores = 1; private String nodeLabelExpression = null; @@ -188,10 +183,8 @@ public Client(Configuration conf) throws Exception { "Interval between each retry, unit is milliseconds"); opts.addOption(TFApplication.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); - opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, - "Provide server jar of tensorflow"); - opts.addOption(TFApplication.OPT_TF_SERVER_PY, true, - "Provide server pyscript of tensorflow"); +/* opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, + "Provide server jar of tensorflow");*/ opts.addOption(TFApplication.OPT_TF_WORKER_NUM, true, "worker quantity of tensorflow"); opts.addOption(TFApplication.OPT_TF_PS_NUM, true, @@ -233,7 +226,7 @@ public boolean init(String[] args) throws ParseException { amMemory = Integer.parseInt(cliParser.getOptionValue("master_memory", "100")); amVCores = Integer.parseInt(cliParser.getOptionValue("master_vcores", "1")); tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT, TFClient.TF_CLIENT_PY); - tfConatinerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, TFAmContainer.APPMASTER_JAR_PATH); + //tfConatinerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, 
TFAmContainer.APPMASTER_JAR_PATH); workerNum = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_WORKER_NUM, "1")); psNum = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_PS_NUM, "0")); @@ -261,7 +254,7 @@ public boolean init(String[] args) throws ParseException { tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT); } - containerMemory = 4096;//Integer.parseInt(cliParser.getOptionValue("container_memory", "4096")); + containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "4096")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue("container_vcores", "1")); @@ -303,6 +296,15 @@ public boolean init(String[] args) throws ParseException { return true; } + private String copyLocalFileToDfs(FileSystem fs, String appId, String srcFilePath, String dstFileName) throws IOException { + String suffix = TFYarnConstants.APP_NAME + "/" + appId + "/" + dstFileName; + Path dst = new Path(fs.getHomeDirectory(), suffix); + if (srcFilePath != null) { + fs.copyFromLocalFile(new Path(srcFilePath), dst); + } + return dst.toString(); + } + /** * Main run function for the client * @return true if application completed successfully @@ -383,19 +385,14 @@ public boolean run() throws IOException, YarnException { Set tags = new HashSet(); appContext.setApplicationTags(tags); - // set local resources for the application master - // local files or archives as needed - // In this scenario, the jar file for the application master is part of the local resources Map localResources = new HashMap(); TFAmContainer tfAmContainer = new TFAmContainer(this); - LOG.info("Copy App Master jar from local filesystem and add to local environment"); - // Copy the application master jar to the filesystem - // Create a local resource to point to the destination jar path + // Copy the application jar to the filesystem FileSystem fs = FileSystem.get(conf); - tfAmContainer.addToLocalResources(fs, appMasterJar, 
TFAmContainer.APPMASTER_JAR_PATH, appId.toString(), - localResources, null); + String dstJarPath = copyLocalFileToDfs(fs, appId.toString(), appMasterJar, TFContainer.SERVER_JAR_PATH); + tfAmContainer.addToLocalResources(fs, new Path(dstJarPath), TFAmContainer.APPMASTER_JAR_PATH, localResources); // Set the log4j properties if needed if (!log4jPropFile.isEmpty()) { @@ -408,26 +405,20 @@ public boolean run() throws IOException, YarnException { Map env = tfAmContainer.setJavaEnv(conf); - // Set the necessary command to execute the application master - if (null != nodeLabelExpression) { appContext.setNodeLabelExpression(nodeLabelExpression); } StringBuilder command = tfAmContainer.makeCommands(amMemory, appMasterMainClass, containerMemory, containerVirtualCores, - workerNum, psNum, tfConatinerJar, containerRetryOptions); + workerNum, psNum, dstJarPath, containerRetryOptions); - LOG.info("Completed setting up app master command " + command.toString()); + LOG.info("AppMaster command: " + command.toString()); List commands = new ArrayList(); commands.add(command.toString()); - // Set up the container launch context for the application master ContainerLaunchContext amContainer = ContainerLaunchContext.newInstance( localResources, env, commands, null, null, null); - // Set up resource type requirements - // For now, both memory and vcores are supported, so we set memory and - // vcores requirements Resource capability = Resource.newInstance(amMemory, amVCores); appContext.setResource(capability); @@ -471,7 +462,7 @@ public boolean run() throws IOException, YarnException { LOG.info("Submitting application to ASM"); yarnClient.submitApplication(appContext); - + handleSignal(appId); return monitorApplication(appId); } @@ -565,6 +556,35 @@ else if (YarnApplicationState.KILLED == state } + private class ClientSignalHandler implements SignalHandler { + + public static final String SIG_INT = "INT"; + private ApplicationId appId = null; + + public ClientSignalHandler(ApplicationId 
appId) { + this.appId = appId; + } + + @Override + public void handle(Signal signal) { + if (signal.getName().equals(SIG_INT)) { + try { + forceKillApplication(appId); + } catch (YarnException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + System.exit(0); + } + } + } + + private void handleSignal(ApplicationId appId) { + ClientSignalHandler sigHandler = new ClientSignalHandler(appId); + Signal.handle(new Signal(ClientSignalHandler.SIG_INT), sigHandler); + } + /** * Kill a submitted application by sending a call to the ASM * @param appId Application Id to be killed. diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java index fef97cb7ce17d..9dca5851765ad 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -151,7 +151,7 @@ public void run() { ApplicationId appId = appMaster.getAppAttempId().getApplicationId(); try { - tfContainer.addToLocalResources(fs, dst, TFContainer.SERVER_JAR_PATH, localResources); + tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, localResources); /* tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), localResources, null);*/ } catch (IOException e) { diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java index ee8ea58a33e60..24d76f2f13802 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java @@ -80,6 +80,16 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, localResources.put(fileDstPath, scRsrc); } + public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath, Map localResources) throws IOException { + FileStatus scFileStatus = fs.getFileStatus(dst); + LocalResource scRsrc = + LocalResource.newInstance( + URL.fromURI(dst.toUri()), + LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, + scFileStatus.getLen(), scFileStatus.getModificationTime()); + localResources.put(fileDstPath, scRsrc); + } + public Map setJavaEnv(Configuration conf) { Map env = new HashMap(); @@ -112,7 +122,7 @@ public Map setJavaEnv(Configuration conf) { public StringBuilder makeCommands(long amMemory, String appMasterMainClass, int containerMemory, int containerVirtualCores, - int workerNumContainers, int psNumContainers, String tfConatinerJar, Vector containerRetryOptions) { + int workerNumContainers, int psNumContainers, String jarDfsPath, Vector containerRetryOptions) { // Set the necessary command to execute the application master Vector vargs = new Vector(30); @@ -128,7 +138,7 @@ public StringBuilder makeCommands(long amMemory, String appMasterMainClass, int vargs.add("--container_vcores " + 
String.valueOf(containerVirtualCores)); vargs.add(TFApplication.makeOption(TFApplication.OPT_TF_WORKER_NUM, String.valueOf(workerNumContainers))); vargs.add(TFApplication.makeOption(TFApplication.OPT_TF_PS_NUM, String.valueOf(psNumContainers))); - vargs.add("--" + TFApplication.OPT_TF_SERVER_JAR + " " + String.valueOf(tfConatinerJar)); + vargs.add("--" + TFApplication.OPT_TF_SERVER_JAR + " " + String.valueOf(jarDfsPath)); vargs.addAll(containerRetryOptions); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java index b23cd5d11ffdf..b31def988d80c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -28,9 +28,8 @@ public class TFApplication { public static final String OPT_TF_QUEUE = "queue"; public static final String OPT_TF_CLIENT = "tf_client"; - public static final String OPT_TF_CLIENT_PY = "tf_clientpy"; public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; - public static final String OPT_TF_SERVER_PY = "tf_serverpy"; + //public static final String OPT_TF_SERVER_PY = "tf_serverpy"; public static final String OPT_TF_WORKER_NUM = "num_worker"; public static final String OPT_TF_PS_NUM = "num_ps"; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index 6063a9cd521fd..2e863290e46cf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -95,6 +95,12 @@ public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath, Map localResources.put(fileDstPath, scRsrc); } + public void addToLocalResources(FileSystem fs, String srcFilePath, String fileDstPath, Map localResources) throws IOException { + + Path path = new Path(srcFilePath); + addToLocalResources(fs, path, fileDstPath, localResources); + } + public void addToLocalResources(FileSystem fs, String fileSrcPath, String fileDstPath, String appId, Map localResources, From 741f5f737ffab678a42613619f881d2290b058b7 Mon Sep 17 00:00:00 2001 From: muzhongz Date: Tue, 17 Jan 2017 09:51:10 +0800 Subject: [PATCH 21/32] pass the "container_memory" by jvm parameters --- .../bin/yarn-tf | 3 +- .../tensorflow/ApplicationMaster.java | 63 +------------------ .../yarn/applications/tensorflow/Client.java | 18 +----- .../tensorflow/LaunchContainerThread.java | 3 +- .../tensorflow/TFAmContainer.java | 4 +- .../applications/tensorflow/TFContainer.java | 1 + .../applications/tensorflow/TFServer.java | 23 +++---- 7 files changed, 19 insertions(+), 96 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf index 8d5d7e0d3fce0..ad45262c82c5c 100755 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/bin/yarn-tf @@ -41,4 +41,5 @@ hadoop jar $JAR $CLIENT_MAIN_CLASS \ --jar $JAR \ --tf_client $JOB \ --num_worker $WORKERS \ - --num_ps $PSES + --num_ps $PSES \ + --container_memory 4096 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 96972c62f49a6..1a6881f1d6f7b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -82,30 +82,24 @@ public class ApplicationMaster { private static final Log LOG = LogFactory.getLog(ApplicationMaster.class); - // Configuration private Configuration conf; public Configuration getConfiguration() { return conf; } - // Handle to communicate with the Resource Manager - @SuppressWarnings("rawtypes") private AMRMClientAsync amRMClient; // In both secure and non-secure modes, this points to the job-submitter. 
@VisibleForTesting UserGroupInformation appSubmitterUgi; - // Handle to communicate with the Node Manager private NMClientAsync nmClientAsync; public NMClientAsync getNMClientAsync() { return nmClientAsync; } - // Listen to process the response from the Node Manager private NMCallbackHandler containerListener; - // Application Attempt Id ( combination of attemptId and fail count ) @VisibleForTesting protected ApplicationAttemptId appAttemptID; @@ -124,11 +118,8 @@ public ApplicationAttemptId getAppAttempId() { @VisibleForTesting protected int numTotalContainers = 1; - // Memory to request for the container on which the shell command will run private long containerMemory = 10; - // VirtualCores to request for the container on which the shell command will run private int containerVirtualCores = 1; - // Priority of the request private int requestPriority; private int numTotalWokerContainers = 1; @@ -156,7 +147,7 @@ public ApplicationAttemptId getAppAttempId() { private int containerMaxRetries = 0; private int containrRetryInterval = 0; - // TF server jar file + // TF server jar file path on hdfs private String tfServerJar = ""; // Hardcoded path to custom log_properties @@ -211,38 +202,7 @@ public static void main(String[] args) { } } - /** - * Dump out contents of $CWD and the environment to stdout for debugging - */ - private void dumpOutDebugInfo() { - - LOG.info("Dump debug output"); - Map envs = System.getenv(); - for (Map.Entry env : envs.entrySet()) { - LOG.info("System env: key=" + env.getKey() + ", val=" + env.getValue()); - System.out.println("System env: key=" + env.getKey() + ", val=" - + env.getValue()); - } - - BufferedReader buf = null; - try { - String lines = Shell.WINDOWS ? 
Shell.execCommand("cmd", "/c", "dir") : - Shell.execCommand("ls", "-al"); - buf = new BufferedReader(new StringReader(lines)); - String line = ""; - while ((line = buf.readLine()) != null) { - LOG.info("System CWD content: " + line); - System.out.println("System CWD content: " + line); - } - } catch (IOException e) { - e.printStackTrace(); - } finally { - IOUtils.cleanup(LOG, buf); - } - } - public ApplicationMaster() { - // Set up the configuration conf = new YarnConfiguration(); } @@ -276,15 +236,10 @@ public boolean init(String[] args) throws ParseException, IOException { opts.addOption("container_retry_interval", true, "Interval between each retry, unit is milliseconds"); - opts.addOption("jar", true, "Jar file containing the tensorflow server"); opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, "Provide container jar of tensorflow"); opts.addOption(TFApplication.OPT_TF_WORKER_NUM, true, "Provide worker server number of tensorflow"); opts.addOption(TFApplication.OPT_TF_PS_NUM, true, "Provide ps server number of tensorflow"); - LOG.info("print args"); - for (String arg : args) { - LOG.info(arg); - } CommandLine cliParser = new GnuParser().parse(opts, args); if (args.length == 0) { @@ -293,7 +248,6 @@ public boolean init(String[] args) throws ParseException, IOException { "No args specified for application master to initialize"); } - //Check whether customer log4j.properties file exists if (fileExist(log4jPath)) { try { Log4jPropertyHelper.updateLog4jConfiguration(ApplicationMaster.class, @@ -611,21 +565,6 @@ public boolean startAllContainers() throws Exception { } -/* FileSystem fs = null; - try { - fs = FileSystem.get(this.getConfiguration()); - } catch (IOException e) { - e.printStackTrace(); - } - - String suffix = TFYarnConstants.APP_NAME + "/" + getAppAttempId().getApplicationId() + "/" + TFContainer.SERVER_JAR_PATH; - Path dst = new Path(fs.getHomeDirectory(), suffix); - if (tfServerJar != null) { - fs.copyFromLocalFile(new Path(tfServerJar), dst); - 
}*/ - - - for (Container allocatedContainer : this.allocatedContainers) { LOG.info("Launching a new container." diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index eff79d44ba8dc..5d80c39a9bfc7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -50,21 +50,15 @@ public class Client { private static final Log LOG = LogFactory.getLog(Client.class); - // Configuration private Configuration conf; private YarnClient yarnClient; - // Application master specific info to register a new Application with RM/ASM private String appName = TFYarnConstants.APP_NAME; public String getAppName() { return appName; } - // App master priority private int amPriority = 0; - // Queue for App master private String amQueue = ""; - // Amt. of memory resource to request for to run the App Master private long amMemory = 100; - // Amt. 
of virtual core resource to request for to run the App Master private int amVCores = 1; private String appMasterJar = ""; @@ -302,6 +296,7 @@ private String copyLocalFileToDfs(FileSystem fs, String appId, String srcFilePat if (srcFilePath != null) { fs.copyFromLocalFile(new Path(srcFilePath), dst); } + LOG.info("Copy " + srcFilePath + " to " + dst.toString()); return dst.toString(); } @@ -395,10 +390,10 @@ public boolean run() throws IOException, YarnException { tfAmContainer.addToLocalResources(fs, new Path(dstJarPath), TFAmContainer.APPMASTER_JAR_PATH, localResources); // Set the log4j properties if needed - if (!log4jPropFile.isEmpty()) { +/* if (!log4jPropFile.isEmpty()) { tfAmContainer.addToLocalResources(fs, log4jPropFile, log4jPath, appId.toString(), localResources, null); - } + }*/ // Set the necessary security tokens as needed //amContainer.setContainerTokens(containerToken); @@ -483,14 +478,12 @@ private boolean monitorApplication(ApplicationId appId) while (true) { - // Check app status every 1 second. try { Thread.sleep(1000); } catch (InterruptedException e) { LOG.debug("Thread sleep in monitoring loop interrupted"); } - // Get application report for the appId we are interested in ApplicationReport report = yarnClient.getApplicationReport(appId); LOG.info("Got application report from ASM for" @@ -547,11 +540,6 @@ else if (YarnApplicationState.KILLED == state return false; } -/* if (System.currentTimeMillis() > (clientStartTime + clientTimeout)) { - LOG.info("Reached client specified timeout for application. 
Killing application"); - forceKillApplication(appId); - return false; - }*/ } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java index 9dca5851765ad..766238ee3a510 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -150,6 +150,7 @@ public void run() { ApplicationId appId = appMaster.getAppAttempId().getApplicationId(); + LOG.info("tfServer jar " + tfServerJar); try { tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, localResources); /* tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), @@ -160,7 +161,7 @@ public void run() { LOG.info("clusterspec: " + this.serverAddress.getClusterSpec().toString()); - this.serverAddress.getClusterSpec().testClusterString(); + //this.serverAddress.getClusterSpec().testClusterString(); ClusterSpec cs = this.serverAddress.getClusterSpec(); StringBuilder command = null; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java index 24d76f2f13802..c985ec85118a9 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFAmContainer.java @@ -50,7 +50,7 @@ public TFAmContainer(Client client) { this.client = client; } - public void addToLocalResources(FileSystem fs, String fileSrcPath, + /* public void addToLocalResources(FileSystem fs, String fileSrcPath, String fileDstPath, String appId, Map localResources, String resources) throws IOException { String suffix = @@ -78,7 +78,7 @@ public void addToLocalResources(FileSystem fs, String fileSrcPath, LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime()); localResources.put(fileDstPath, scRsrc); - } + }*/ public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath, Map localResources) throws IOException { FileStatus scFileStatus = fs.getFileStatus(dst); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java index 2e863290e46cf..f82a31f798458 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFContainer.java @@ -87,6 +87,7 @@ private void execCmd(String cmd) { public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath, Map 
localResources) throws IOException { FileStatus scFileStatus = fs.getFileStatus(dst); + LOG.info("Path " + dst.toString() + "->" + " " + fileDstPath); LocalResource scRsrc = LocalResource.newInstance( URL.fromURI(dst.toUri()), diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java index d733f972e5bd8..455f6e1d9397f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFServer.java @@ -36,9 +36,8 @@ /** * Created by muzhongz on 16-11-29. 
*/ -public class TFServer implements Runnable { +public class TFServer { private static final Log LOG = LogFactory.getLog(TFServer.class); - private String tfServerPy; public static final String OPT_CS = "cs"; public static final String OPT_TI = "ti"; @@ -131,22 +130,16 @@ private void execCmd(String cmd) { } } - @Override - public void run() { - execCmd("ls -l"); - execCmd("pwd"); - String command = tfServerPy; - execCmd(command); - } - - - private String address; - private int port; - public void startTFServer() { - execCmd("ls -l"); + LOG.info("Launch a new tensorflow " + jobName + taskIndex); + /* try { + Thread.sleep(10000); + } catch (InterruptedException e) { + e.printStackTrace(); + }*/ org.tensorflow.bridge.TFServer server = new org.tensorflow.bridge.TFServer(cluster, jobName, taskIndex); server.start(); server.join(); + LOG.info("Ternsorflow " + jobName + taskIndex + "stopped!"); } } From 1ad6b80618bdc5fa3a28a2102312a30bfbb0f6b2 Mon Sep 17 00:00:00 2001 From: muzhongz Date: Tue, 17 Jan 2017 14:14:32 +0800 Subject: [PATCH 22/32] OPT: convert the raw string options of client and am to constants --- .../tensorflow/ApplicationMaster.java | 37 +++++----- .../yarn/applications/tensorflow/Client.java | 74 +++++++++---------- .../tensorflow/LaunchContainerThread.java | 20 +---- .../tensorflow/TFApplication.java | 15 +++- .../applications/tensorflow/TFClient.java | 74 ++++--------------- 5 files changed, 82 insertions(+), 138 deletions(-) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java index 1a6881f1d6f7b..55bb27db46ddd 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/ApplicationMaster.java @@ -216,24 +216,24 @@ public ApplicationMaster() { */ public boolean init(String[] args) throws ParseException, IOException { Options opts = new Options(); - opts.addOption("app_attempt_id", true, + opts.addOption(TFApplication.OPT_TF_APP_ATTEMPT_ID, true, "App Attempt ID. Not to be used unless for testing purposes"); - opts.addOption("container_memory", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_MEMORY, true, "Amount of memory in MB to be requested to run the shell command"); - opts.addOption("container_vcores", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_VCORES, true, "Amount of virtual cores to be requested to run the shell command"); - opts.addOption("priority", true, "Application Priority. Default 0"); - opts.addOption("container_retry_policy", true, + opts.addOption(TFApplication.OPT_TF_PRIORITY, true, "Application Priority. Default 0"); + opts.addOption(TFApplication.OPT_TF_CONTAINER_RETRY_POLICY, true, "Retry policy when container fails to run, " + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " + "2: RETRY_ON_SPECIFIC_ERROR_CODES"); - opts.addOption("container_retry_error_codes", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES, true, "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " + "codes is specified with this option, " + "e.g. 
--container_retry_error_codes 1,2,3"); - opts.addOption("container_max_retries", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_MAX_RETRIES, true, "If container could retry, it specifies max retires"); - opts.addOption("container_retry_interval", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_RETRY_INTERVAL, true, "Interval between each retry, unit is milliseconds"); opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, "Provide container jar of tensorflow"); @@ -260,8 +260,8 @@ public boolean init(String[] args) throws ParseException, IOException { Map envs = System.getenv(); if (!envs.containsKey(Environment.CONTAINER_ID.name())) { - if (cliParser.hasOption("app_attempt_id")) { - String appIdStr = cliParser.getOptionValue("app_attempt_id", ""); + if (cliParser.hasOption(TFApplication.OPT_TF_APP_ATTEMPT_ID)) { + String appIdStr = cliParser.getOptionValue(TFApplication.OPT_TF_APP_ATTEMPT_ID, ""); appAttemptID = ApplicationAttemptId.fromString(appIdStr); } else { throw new IllegalArgumentException( @@ -295,9 +295,9 @@ public boolean init(String[] args) throws ParseException, IOException { + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + appAttemptID.getAttemptId()); - containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "256")); + containerMemory = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_MEMORY, "256")); containerVirtualCores = Integer.parseInt(cliParser.getOptionValue( - "container_vcores", "1")); + TFApplication.OPT_TF_CONTAINER_VCORES, "1")); numTotalWokerContainers = Integer.parseInt(cliParser.getOptionValue( @@ -316,22 +316,22 @@ public boolean init(String[] args) throws ParseException, IOException { } requestPriority = Integer.parseInt(cliParser - .getOptionValue("priority", "0")); + .getOptionValue(TFApplication.OPT_TF_PRIORITY, "0")); containerRetryPolicy = ContainerRetryPolicy.values()[ Integer.parseInt(cliParser.getOptionValue( - "container_retry_policy", 
"0"))]; - if (cliParser.hasOption("container_retry_error_codes")) { + TFApplication.OPT_TF_CONTAINER_RETRY_POLICY, "0"))]; + if (cliParser.hasOption(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES)) { containerRetryErrorCodes = new HashSet<>(); for (String errorCode : - cliParser.getOptionValue("container_retry_error_codes").split(",")) { + cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES).split(",")) { containerRetryErrorCodes.add(Integer.parseInt(errorCode)); } } containerMaxRetries = Integer.parseInt( - cliParser.getOptionValue("container_max_retries", "0")); + cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_MAX_RETRIES, "0")); containrRetryInterval = Integer.parseInt(cliParser.getOptionValue( - "container_retry_interval", "0")); + TFApplication.OPT_TF_CONTAINER_RETRY_INTERVAL, "0")); tfServerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, TFAmContainer.APPMASTER_JAR_PATH); @@ -582,7 +582,6 @@ public boolean startAllContainers() throws Exception { LOG.info("server cid: " + allocatedContainer.getId().toString()); LaunchContainerThread launchDelegator = new LaunchContainerThread(allocatedContainer, this, clusterSpec.getServerAddress(allocatedContainer.getId().toString())); - //launchDelegator.setDst(dst); launchDelegator.setTfServerJar(tfServerJar); launchDelegator.setContainerMemory(containerMemory); launchDelegator.setContainerRetryPolicy(containerRetryPolicy); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java index 5d80c39a9bfc7..fdad20bf720b7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java 
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/Client.java @@ -142,43 +142,41 @@ public Client(Configuration conf) throws Exception { yarnClient = YarnClient.createYarnClient(); yarnClient.init(conf); opts = new Options(); - opts.addOption("appname", true, "Application Name. Default value - tensorflow"); - opts.addOption("priority", true, "Application Priority. Default 0"); - opts.addOption("queue", true, "RM Queue in which this application is to be submitted"); + opts.addOption(TFApplication.OPT_TF_APPNAME, true, "Application Name. Default value - tensorflow"); + opts.addOption(TFApplication.OPT_TF_PRIORITY, true, "Application Priority. Default 0"); + opts.addOption(TFApplication.OPT_TF_QUEUE, true, "RM Queue in which this application is to be submitted"); opts.addOption("jar", true, "Jar file containing the application master"); - opts.addOption("master_memory", true, "Amount of memory in MB to be requested to run the application master"); - opts.addOption("master_vcores", true, "Amount of virtual cores to be requested to run the application master"); - opts.addOption("container_memory", true, "Amount of memory in MB to be requested to run a tensorflow worker"); - opts.addOption("container_vcores", true, "Amount of virtual cores to be requested to run a tensorflow worker"); - opts.addOption("log_properties", true, "log4j.properties file"); - opts.addOption("attempt_failures_validity_interval", true, + opts.addOption(TFApplication.OPT_TF_MASTER_MEMORY, true, "Amount of memory in MB to be requested to run the application master"); + opts.addOption(TFApplication.OPT_TF_MASTER_VCORES, true, "Amount of virtual cores to be requested to run the application master"); + opts.addOption(TFApplication.OPT_TF_CONTAINER_MEMORY, true, "Amount of memory in MB to be requested to run a tensorflow worker"); + opts.addOption(TFApplication.OPT_TF_CONTAINER_VCORES, true, 
"Amount of virtual cores to be requested to run a tensorflow worker"); + opts.addOption(TFApplication.OPT_TF_LOG_PROPERTIES, true, "log4j.properties file"); + opts.addOption(TFApplication.OPT_TF_ATTEMPT_FAILURES_VALIDITY_INTERVAL, true, "when attempt_failures_validity_interval in milliseconds is set to > 0," + "the failure number will not take failures which happen out of " + "the validityInterval into failure count. " + "If failure count reaches to maxAppAttempts, " + "the application will be failed."); - opts.addOption("node_label_expression", true, + opts.addOption(TFApplication.OPT_TF_NODE_LABEL_EXPRESSION, true, "Node label expression to determine the nodes" + " where all the containers of this application" + " will be allocated, \"\" means containers" + " can be allocated anywhere, if you don't specify the option," + " default node_label_expression of queue will be used."); - opts.addOption("container_retry_policy", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_RETRY_POLICY, true, "Retry policy when container fails to run, " + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " + "2: RETRY_ON_SPECIFIC_ERROR_CODES"); - opts.addOption("container_retry_error_codes", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES, true, "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " + "codes is specified with this option, " + "e.g. 
--container_retry_error_codes 1,2,3"); - opts.addOption("container_max_retries", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_MAX_RETRIES, true, "If container could retry, it specifies max retires"); - opts.addOption("container_retry_interval", true, + opts.addOption(TFApplication.OPT_TF_CONTAINER_RETRY_INTERVAL, true, "Interval between each retry, unit is milliseconds"); opts.addOption(TFApplication.OPT_TF_CLIENT, true, "Provide client python of tensorflow"); -/* opts.addOption(TFApplication.OPT_TF_SERVER_JAR, true, - "Provide server jar of tensorflow");*/ opts.addOption(TFApplication.OPT_TF_WORKER_NUM, true, "worker quantity of tensorflow"); opts.addOption(TFApplication.OPT_TF_PS_NUM, true, @@ -214,11 +212,11 @@ public boolean init(String[] args) throws ParseException { } } - appName = cliParser.getOptionValue("appname", "tensorflow"); - amPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0")); - amQueue = cliParser.getOptionValue("queue", "default"); - amMemory = Integer.parseInt(cliParser.getOptionValue("master_memory", "100")); - amVCores = Integer.parseInt(cliParser.getOptionValue("master_vcores", "1")); + appName = cliParser.getOptionValue(TFApplication.OPT_TF_APPNAME, "tensorflow"); + amPriority = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_PRIORITY, "0")); + amQueue = cliParser.getOptionValue(TFApplication.OPT_TF_QUEUE, "default"); + amMemory = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_MASTER_MEMORY, "100")); + amVCores = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_MASTER_VCORES, "1")); tfClientPy = cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT, TFClient.TF_CLIENT_PY); //tfConatinerJar = cliParser.getOptionValue(TFApplication.OPT_TF_SERVER_JAR, TFAmContainer.APPMASTER_JAR_PATH); workerNum = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_WORKER_NUM, "1")); @@ -248,8 +246,8 @@ public boolean init(String[] args) throws ParseException { tfClientPy = 
cliParser.getOptionValue(TFApplication.OPT_TF_CLIENT); } - containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "4096")); - containerVirtualCores = Integer.parseInt(cliParser.getOptionValue("container_vcores", "1")); + containerMemory = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_MEMORY, "4096")); + containerVirtualCores = Integer.parseInt(cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_VCORES, "1")); if (containerMemory < 0 || containerVirtualCores < 0 || workerNum < 1 || psNum < 0) { @@ -261,30 +259,30 @@ public boolean init(String[] args) throws ParseException { + ", ps=" + psNum); } - nodeLabelExpression = cliParser.getOptionValue("node_label_expression", null); + nodeLabelExpression = cliParser.getOptionValue(TFApplication.OPT_TF_NODE_LABEL_EXPRESSION, null); attemptFailuresValidityInterval = Long.parseLong(cliParser.getOptionValue( - "attempt_failures_validity_interval", "-1")); + TFApplication.OPT_TF_ATTEMPT_FAILURES_VALIDITY_INTERVAL, "-1")); - log4jPropFile = cliParser.getOptionValue("log_properties", ""); + log4jPropFile = cliParser.getOptionValue(TFApplication.OPT_TF_LOG_PROPERTIES, ""); // Get container retry options - if (cliParser.hasOption("container_retry_policy")) { - containerRetryOptions.add("--container_retry_policy " - + cliParser.getOptionValue("container_retry_policy")); + if (cliParser.hasOption(TFApplication.OPT_TF_CONTAINER_RETRY_POLICY)) { + containerRetryOptions.add(TFApplication.makeOption(TFApplication.OPT_TF_CONTAINER_RETRY_POLICY, + cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_RETRY_POLICY))); } - if (cliParser.hasOption("container_retry_error_codes")) { - containerRetryOptions.add("--container_retry_error_codes " - + cliParser.getOptionValue("container_retry_error_codes")); + if (cliParser.hasOption(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES)) { + containerRetryOptions.add(TFApplication.makeOption(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES, + 
cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_RETRY_ERROR_CODES))); } - if (cliParser.hasOption("container_max_retries")) { - containerRetryOptions.add("--container_max_retries " - + cliParser.getOptionValue("container_max_retries")); + if (cliParser.hasOption(TFApplication.OPT_TF_CONTAINER_MAX_RETRIES)) { + containerRetryOptions.add(TFApplication.makeOption(TFApplication.OPT_TF_CONTAINER_MAX_RETRIES, + cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_MAX_RETRIES))); } - if (cliParser.hasOption("container_retry_interval")) { - containerRetryOptions.add("--container_retry_interval " - + cliParser.getOptionValue("container_retry_interval")); + if (cliParser.hasOption(TFApplication.OPT_TF_CONTAINER_RETRY_INTERVAL)) { + containerRetryOptions.add(TFApplication.makeOption(TFApplication.OPT_TF_CONTAINER_RETRY_INTERVAL, + cliParser.getOptionValue(TFApplication.OPT_TF_CONTAINER_RETRY_INTERVAL))); } return true; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java index 766238ee3a510..9ed743d9ad17e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/LaunchContainerThread.java @@ -35,10 +35,8 @@ public class LaunchContainerThread implements Runnable { private static final Log LOG = LogFactory.getLog(LaunchContainerThread.class); - // Allocated container private Container container; private String tfServerJar; - // Memory to request for the 
container on which the shell command will run private long containerMemory = 10; // Container retry options @@ -113,16 +111,6 @@ public LaunchContainerThread(Container container, ApplicationMaster appMaster, T } } - private Path dst; - - public Path getDst() { - return dst; - } - - public void setDst(Path dst) { - this.dst = dst; - } - @Override /** * Connects to CM, sets up container launch context @@ -142,19 +130,14 @@ public void run() { TFContainer tfContainer = new TFContainer(appMaster); - // Set the java environment Map env = tfContainer.setJavaEnv(appMaster.getConfiguration(), null); - // Set the local resources Map localResources = new HashMap(); ApplicationId appId = appMaster.getAppAttempId().getApplicationId(); - LOG.info("tfServer jar " + tfServerJar); try { tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, localResources); -/* tfContainer.addToLocalResources(fs, tfServerJar, TFContainer.SERVER_JAR_PATH, appId.toString(), - localResources, null);*/ } catch (IOException e) { e.printStackTrace(); } @@ -179,7 +162,6 @@ public void run() { List commands = new ArrayList(); commands.add(command.toString()); - //commands.add("/home/muzhongz/tfonyarn/tf_server.sh"); if (serverAddress != null) { LOG.info(serverAddress.getJobName() + " : " + serverAddress.getAddress() + ":" + serverAddress.getPort()); } @@ -189,7 +171,7 @@ public void run() { containerRetryPolicy, containerRetryErrorCodes, containerMaxRetries, containrRetryInterval); for (String cmd : commands) { - LOG.info("command: " + cmd.toString()); + LOG.info("Container " + container.getId() + " command: " + cmd.toString()); } ContainerLaunchContext ctx = ContainerLaunchContext.newInstance( localResources, env, commands, null, appMaster.getAllTokens().duplicate(), diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java index b31def988d80c..e1c14ddf104b4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFApplication.java @@ -26,14 +26,25 @@ public class TFApplication { public static final String OPT_TF_APPNAME = "appname"; public static final String OPT_TF_PRIORITY = "priority"; public static final String OPT_TF_QUEUE = "queue"; + public static final String OPT_TF_MASTER_MEMORY = "master_memory"; + public static final String OPT_TF_MASTER_VCORES = "master_vcores"; + public static final String OPT_TF_CONTAINER_MEMORY = "container_memory"; + public static final String OPT_TF_CONTAINER_VCORES = "container_vcores"; + public static final String OPT_TF_LOG_PROPERTIES = "log_properties"; + public static final String OPT_TF_ATTEMPT_FAILURES_VALIDITY_INTERVAL = "attempt_failures_validity_interval"; + public static final String OPT_TF_NODE_LABEL_EXPRESSION = "node_label_expression"; + public static final String OPT_TF_CONTAINER_RETRY_POLICY = "container_retry_policy"; + public static final String OPT_TF_CONTAINER_RETRY_ERROR_CODES = "container_retry_error_codes"; + public static final String OPT_TF_CONTAINER_MAX_RETRIES = "container_max_retries"; + public static final String OPT_TF_CONTAINER_RETRY_INTERVAL = "container_retry_interval"; + + public static final String OPT_TF_APP_ATTEMPT_ID = "app_attempt_id"; public static final String OPT_TF_CLIENT = "tf_client"; public static final String OPT_TF_SERVER_JAR = "tf_serverjar"; - //public static final String OPT_TF_SERVER_PY = "tf_serverpy"; public static final String 
OPT_TF_WORKER_NUM = "num_worker"; public static final String OPT_TF_PS_NUM = "num_ps"; - public static String makeOption(String opt, String val) { return "--" + opt + " " + val; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java index f1c7fff1d3799..7ba97a5ed13ab 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-tensorflow/src/main/java/org/apache/hadoop/yarn/applications/tensorflow/TFClient.java @@ -36,12 +36,6 @@ public class TFClient implements Runnable { private static final Log LOG = LogFactory.getLog(TFClient.class); public static final String TF_CLIENT_PY = "tf_client.py"; private String tfClientPy; - private String tfMasterAddress; - private int tfMasterPort = TFYarnConstants.INVALID_TCP_PORT; - private String currentDirectory; - private static final String OPT_MASTER_ADDRESS = "ma"; - private static final String OPT_MASTER_PORT = "mp"; - private static final String TF_PY_OPT_WORKERS = "wk"; private static final String TF_PY_OPT_PSES = "ps"; @@ -81,47 +75,9 @@ private void execCmd(String cmd) { } public TFClient(String tfClientPy) { - LOG.info("tf client py script: " + tfClientPy); this.tfClientPy = tfClientPy; } - private String makeOption(String opt, String val) { - - if (opt == null || opt.equals("")) { - return ""; - } - - String lead = "--"; - - if (val == null) { - lead += opt; - return lead; - } - - lead += opt; - lead += " "; - lead += val; - return lead; - } - - - @Override - public void run() { - execCmd("ls -l"); - - if 
(tfMasterAddress == null || tfMasterPort == TFYarnConstants.INVALID_TCP_PORT) { - LOG.fatal("invalid master address!"); - execCmd("python " + tfClientPy); - } else { - execCmd("python " + tfClientPy + " \"" + tfMasterAddress + "\"" + " " + tfMasterPort); - } - } - - public void startTensorflowClient() { - Thread thread = new Thread(this); - thread.start(); - } - public void startTensorflowClient(String clusterSpecJsonString) { if (clusterSpecJsonString == null || clusterSpecJsonString.equals("")) { return; @@ -165,26 +121,24 @@ public void startTensorflowClient(String clusterSpecJsonString) { LOG.info("workers: <" + workers + ">;" + "pses: <" + pses + ">"); - Thread thread = new Thread(new Runnable() { - @Override - public void run() { + Thread thread = new Thread(this); + thread.start(); - String cmd = "python " + tfClientPy; + } - if (workers != null) { - cmd += " " + TFApplication.makeOption(TF_PY_OPT_WORKERS, "\"" + workers + "\""); - } + @Override + public void run() { + String cmd = "python " + tfClientPy; - if (pses != null) { - cmd += " " + TFApplication.makeOption(TF_PY_OPT_PSES, "\"" + pses + "\""); - } + if (workers != null) { + cmd += " " + TFApplication.makeOption(TF_PY_OPT_WORKERS, "\"" + workers + "\""); + } - LOG.info("TF client command is [" + cmd + "]"); - execCmd(cmd); - } - }); - thread.start(); + if (pses != null) { + cmd += " " + TFApplication.makeOption(TF_PY_OPT_PSES, "\"" + pses + "\""); + } + LOG.info("TF client command is [" + cmd + "]"); + execCmd(cmd); } - } From 0c2f9844b56668d99377629fac8793fb0255d01a Mon Sep 17 00:00:00 2001 From: huafengw Date: Fri, 30 Dec 2016 14:10:29 +0800 Subject: [PATCH 23/32] initial commit of tensorflow jni bridge --- hadoop-deeplearning-project/pom.xml | 36 + .../org/tensorflow/bridge/ClusterSpec.java | 169 + .../tensorflow/bridge/StartLocalServer.java | 27 + .../java/org/tensorflow/bridge/TFServer.java | 107 + .../tensorflow/bridge/TFServerException.java | 24 + .../tensorflow/distruntime/ClusterDef.java | 
812 +++ .../distruntime/ClusterDefOrBuilder.java | 53 + .../org/tensorflow/distruntime/JobDef.java | 902 +++ .../distruntime/JobDefOrBuilder.java | 106 + .../org/tensorflow/distruntime/ServerDef.java | 1246 ++++ .../distruntime/ServerDefOrBuilder.java | 112 + .../tensorflow/distruntime/ServerProtos.java | 101 + .../framework/AllocationDescription.java | 897 +++ .../AllocationDescriptionOrBuilder.java | 72 + .../AllocationDescriptionProtos.java | 61 + .../framework/AllocatorMemoryUsed.java | 612 ++ .../AllocatorMemoryUsedOrBuilder.java | 29 + .../org/tensorflow/framework/AttrValue.java | 4667 +++++++++++++ .../framework/AttrValueOrBuilder.java | 168 + .../tensorflow/framework/AttrValueProtos.java | 115 + .../org/tensorflow/framework/ConfigProto.java | 2597 ++++++++ .../framework/ConfigProtoOrBuilder.java | 326 + .../tensorflow/framework/ConfigProtos.java | 203 + .../tensorflow/framework/CostGraphDef.java | 4058 ++++++++++++ .../framework/CostGraphDefOrBuilder.java | 33 + .../tensorflow/framework/CostGraphProtos.java | 109 + .../org/tensorflow/framework/DataType.java | 553 ++ .../framework/DebugTensorWatch.java | 1134 ++++ .../framework/DebugTensorWatchOrBuilder.java | 130 + .../framework/DeviceAttributes.java | 1227 ++++ .../framework/DeviceAttributesOrBuilder.java | 110 + .../framework/DeviceAttributesProtos.java | 73 + .../tensorflow/framework/DeviceLocality.java | 445 ++ .../framework/DeviceLocalityOrBuilder.java | 19 + .../tensorflow/framework/DeviceStepStats.java | 842 +++ .../framework/DeviceStepStatsOrBuilder.java | 43 + .../org/tensorflow/framework/FunctionDef.java | 4081 ++++++++++++ .../framework/FunctionDefLibrary.java | 1063 +++ .../FunctionDefLibraryOrBuilder.java | 57 + .../framework/FunctionDefOrBuilder.java | 248 + .../tensorflow/framework/FunctionProtos.java | 149 + .../org/tensorflow/framework/GPUOptions.java | 1054 +++ .../framework/GPUOptionsOrBuilder.java | 114 + .../org/tensorflow/framework/GradientDef.java | 713 ++ 
.../framework/GradientDefOrBuilder.java | 45 + .../org/tensorflow/framework/GraphDef.java | 1539 +++++ .../framework/GraphDefOrBuilder.java | 163 + .../tensorflow/framework/GraphOptions.java | 1164 ++++ .../framework/GraphOptionsOrBuilder.java | 108 + .../org/tensorflow/framework/GraphProtos.java | 69 + .../tensorflow/framework/HistogramProto.java | 1078 +++ .../framework/HistogramProtoOrBuilder.java | 84 + .../org/tensorflow/framework/KernelDef.java | 2237 +++++++ .../framework/KernelDefOrBuilder.java | 130 + .../tensorflow/framework/KernelDefProtos.java | 76 + .../tensorflow/framework/LogMemoryProtos.java | 131 + .../framework/MemoryLogRawAllocation.java | 981 +++ .../MemoryLogRawAllocationOrBuilder.java | 82 + .../framework/MemoryLogRawDeallocation.java | 910 +++ .../MemoryLogRawDeallocationOrBuilder.java | 74 + .../tensorflow/framework/MemoryLogStep.java | 597 ++ .../framework/MemoryLogStepOrBuilder.java | 36 + .../framework/MemoryLogTensorAllocation.java | 833 +++ .../MemoryLogTensorAllocationOrBuilder.java | 63 + .../MemoryLogTensorDeallocation.java | 601 ++ .../MemoryLogTensorDeallocationOrBuilder.java | 37 + .../framework/MemoryLogTensorOutput.java | 907 +++ .../MemoryLogTensorOutputOrBuilder.java | 72 + .../tensorflow/framework/NameAttrList.java | 777 +++ .../framework/NameAttrListOrBuilder.java | 53 + .../org/tensorflow/framework/NodeDef.java | 1684 +++++ .../framework/NodeDefOrBuilder.java | 263 + .../tensorflow/framework/NodeExecStats.java | 2065 ++++++ .../framework/NodeExecStatsOrBuilder.java | 145 + .../org/tensorflow/framework/NodeOutput.java | 614 ++ .../framework/NodeOutputOrBuilder.java | 27 + .../org/tensorflow/framework/NodeProto.java | 75 + .../java/org/tensorflow/framework/OpDef.java | 5878 +++++++++++++++++ .../tensorflow/framework/OpDefOrBuilder.java | 252 + .../org/tensorflow/framework/OpDefProtos.java | 126 + .../tensorflow/framework/OpDeprecation.java | 604 ++ .../framework/OpDeprecationOrBuilder.java | 36 + 
.../java/org/tensorflow/framework/OpList.java | 720 ++ .../tensorflow/framework/OpListOrBuilder.java | 33 + .../framework/OptimizerOptions.java | 1032 +++ .../framework/OptimizerOptionsOrBuilder.java | 54 + .../tensorflow/framework/ResourceHandle.java | 1085 +++ .../framework/ResourceHandleOrBuilder.java | 93 + .../framework/ResourceHandleProto.java | 59 + .../org/tensorflow/framework/RunMetadata.java | 1297 ++++ .../framework/RunMetadataOrBuilder.java | 109 + .../org/tensorflow/framework/RunOptions.java | 1247 ++++ .../framework/RunOptionsOrBuilder.java | 90 + .../framework/SaveSliceInfoDef.java | 1122 ++++ .../framework/SaveSliceInfoDefOrBuilder.java | 102 + .../org/tensorflow/framework/StepStats.java | 712 ++ .../framework/StepStatsOrBuilder.java | 33 + .../tensorflow/framework/StepStatsProtos.java | 124 + .../org/tensorflow/framework/Summary.java | 4211 ++++++++++++ .../framework/SummaryDescription.java | 537 ++ .../SummaryDescriptionOrBuilder.java | 29 + .../framework/SummaryOrBuilder.java | 53 + .../tensorflow/framework/SummaryProtos.java | 133 + .../framework/TensorDescription.java | 936 +++ .../framework/TensorDescriptionOrBuilder.java | 76 + .../framework/TensorDescriptionProtos.java | 71 + .../org/tensorflow/framework/TensorProto.java | 3075 +++++++++ .../framework/TensorProtoOrBuilder.java | 343 + .../tensorflow/framework/TensorProtos.java | 76 + .../framework/TensorShapeProto.java | 1752 +++++ .../framework/TensorShapeProtoOrBuilder.java | 108 + .../framework/TensorShapeProtos.java | 71 + .../framework/TensorSliceProto.java | 1487 +++++ .../framework/TensorSliceProtoOrBuilder.java | 68 + .../framework/TensorSliceProtos.java | 71 + .../framework/ThreadPoolOptionProto.java | 449 ++ .../ThreadPoolOptionProtoOrBuilder.java | 20 + .../org/tensorflow/framework/TypesProtos.java | 63 + .../org/tensorflow/framework/VariableDef.java | 1069 +++ .../framework/VariableDefOrBuilder.java | 88 + .../tensorflow/framework/VariableProtos.java | 73 + 
.../org/tensorflow/framework/VersionDef.java | 743 +++ .../framework/VersionDefOrBuilder.java | 52 + .../tensorflow/framework/VersionsProtos.java | 58 + .../src/main/native/exception_jni.cc | 39 + .../src/main/native/exception_jni.h | 39 + .../native/org_tensorflow_bridge_TFServer.cpp | 100 + .../native/org_tensorflow_bridge_TFServer.h | 53 + pom.xml | 1 + 129 files changed, 78169 insertions(+) create mode 100644 hadoop-deeplearning-project/pom.xml create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/ClusterSpec.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/StartLocalServer.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServer.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServerException.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescription.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsed.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsedOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValue.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtoOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DataType.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatch.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatchOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributes.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocality.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocalityOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStats.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStatsOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibrary.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibraryOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptions.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptionsOrBuilder.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptions.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptionsOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProtoOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/LogMemoryProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocation.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocationOrBuilder.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocation.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocationOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStep.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStepOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocation.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocationOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocation.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocationOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutput.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutputOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrList.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrListOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStats.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStatsOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutput.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutputOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecation.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecationOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpList.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpListOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptions.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptionsOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandle.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleProto.java create mode 
100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadata.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadataOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptions.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptionsOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStats.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/Summary.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SummaryDescription.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SummaryDescriptionOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SummaryOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SummaryProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorDescription.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorDescriptionOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorDescriptionProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorProtoOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorShapeProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorShapeProtoOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorShapeProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorSliceProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorSliceProtoOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TensorSliceProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ThreadPoolOptionProto.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ThreadPoolOptionProtoOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/TypesProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/VariableDef.java create mode 100644 
hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/VariableDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/VariableProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/VersionDef.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/VersionDefOrBuilder.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/VersionsProtos.java create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/native/exception_jni.cc create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/native/exception_jni.h create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/native/org_tensorflow_bridge_TFServer.cpp create mode 100644 hadoop-deeplearning-project/tensorflow-bridge/src/main/native/org_tensorflow_bridge_TFServer.h diff --git a/hadoop-deeplearning-project/pom.xml b/hadoop-deeplearning-project/pom.xml new file mode 100644 index 0000000000000..79cdfd5ca00f5 --- /dev/null +++ b/hadoop-deeplearning-project/pom.xml @@ -0,0 +1,36 @@ + + + + 4.0.0 + + org.apache.hadoop + hadoop-project + 3.0.0-alpha2-SNAPSHOT + ../hadoop-project + + hadoop-deeplearning-project + 3.0.0-alpha2-SNAPSHOT + Apache Hadoop Deep Learning Project + Apache Hadoop Deep Learning Project + pom + + + tensorflow-bridge + + + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/ClusterSpec.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/ClusterSpec.java new file mode 100644 index 0000000000000..9611400d61679 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/ClusterSpec.java @@ -0,0 +1,169 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more 
contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.tensorflow.bridge; + +import org.tensorflow.distruntime.ClusterDef; +import org.tensorflow.distruntime.JobDef; + +import java.util.*; + +public class ClusterSpec { + + public ClusterDef cluster_def; + public Map> cluster_spec; // job_name task_index address + + public ClusterSpec(Map> cluster) //cluster: job name --> address list map + { + cluster_spec = new HashMap>(); + Iterator iter = cluster.entrySet().iterator(); + Integer i = 0; + while (iter.hasNext()) { + Map.Entry entry = (Map.Entry) iter.next(); + String key = (String) entry.getKey(); + ArrayList value = (ArrayList) entry.getValue(); + Map job_tasks = new HashMap(); + i = 0; + Iterator iter_address = value.iterator(); + while (iter_address.hasNext()) { + job_tasks.put(i, (String) iter_address.next()); + i++; + } + cluster_spec.put(key, job_tasks); + } + this.make_cluster_def(); + } + + //Create a ClusterDef based on the given cluster_spec + public void make_cluster_def() { + Map tasks; + int taskIndex; + String address; + + ClusterDef.Builder cluster_def_builder = ClusterDef.newBuilder(); + JobDef.Builder job_builder; + JobDef job; + + Collection jobSet = cluster_spec.keySet(); + List jobList = new ArrayList(jobSet); //list就是一个job name的list + Collections.sort(jobList); 
//sort the key of cluster_spec + + for (int i = 0; i < jobList.size(); i++) { + job_builder = JobDef.newBuilder(); + job_builder.setName(jobList.get(i)); //得到第i个job的name + tasks = cluster_spec.get(jobList.get(i)); //第i个job对应的task的一个map, taskIndex-->address + + Collection taskIndexSet = tasks.keySet(); + List taskIndexList = new ArrayList(taskIndexSet); + Collections.sort(taskIndexList); //sort the index of tasks + for (int j = 0; j < taskIndexList.size(); j++) { + taskIndex = taskIndexList.get(j); + address = tasks.get(taskIndex); + job_builder.putTasks(taskIndex, address); //把taskIndex和对应的address放到job_builder里面 + } + job = job_builder.build(); + cluster_def_builder.addJob(job); + } + + cluster_def = cluster_def_builder.build(); + } + + //Judge whether the cluster is empty + public boolean nonzero() { + return cluster_def.isInitialized(); + } + + //Judge whether two cluster specs equal to each other + public boolean equals(ClusterSpec other) { + return cluster_def.equals(other.cluster_def); + } + + //return a map from job names to their tasks(as the list form) + public Map> as_dict() { + Map> job_tasks_map = new HashMap>(); + String job_name; + List jobs = this.jobs(); + for (int i = 0; i < jobs.size(); i++) { + job_name = jobs.get(i); + List task_indices = this.task_indices(job_name); + if (Collections.max(task_indices) + 1 == task_indices.size()) //the tasks indices are dense + { + job_tasks_map.put(job_name, this.job_tasks(job_name)); + } else //the tasks indices are not dense, manually make the list dense + { + List tasks = new ArrayList(); + Integer task_index; + for (int j = 0; j < task_indices.size(); j++) { + task_index = task_indices.get(j); + tasks.add(this.task_address(job_name, task_index)); + + } + } + } + return job_tasks_map; + } + + //返回所有的Job组成的list + public List jobs() { + Collection jobSet = cluster_spec.keySet(); + List jobList = new ArrayList(jobSet); + return jobList; + } + + //return the number of tasks defined in the given job + public int 
num_tasks(String job_name) { + return cluster_spec.get(job_name).keySet().size(); + } + + //return a list of valid task indices in the given job + public List task_indices(String job_name) { + Collection task_index_set = cluster_spec.get(job_name).keySet(); + List task_index_list = new ArrayList(task_index_set); + return task_index_list; + } + + //return the address of the given task in the given job + public String task_address(String job_name, Integer task_index) { + Map job = cluster_spec.get(job_name); + return job.get(task_index); + } + + //return a list of tasks addresses, where the index in the list corresponds to the task index of each task + public List job_tasks(String job_name) { + Map job = cluster_spec.get(job_name); + List address_list = new ArrayList(job.size() + 1); + + Collection taskIndexSet = job.keySet(); + List taskIndexList = new ArrayList(taskIndexSet); + Collections.sort(taskIndexList); //sort the index of tasks + int taskIndex; + String address; + for (int j = 0; j < taskIndexList.size(); j++) { + taskIndex = taskIndexList.get(j); + address = job.get(taskIndex); + //address_list.set(taskIndex,address); + address_list.add(address); + } + + return address_list; + } + + //Return the ClusterDef property + public ClusterDef as_cluster_def() { + return cluster_def; + } +} \ No newline at end of file diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/StartLocalServer.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/StartLocalServer.java new file mode 100644 index 0000000000000..7c5db2ec38599 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/StartLocalServer.java @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.tensorflow.bridge; + +public class StartLocalServer { + public static void main(String[] args) throws Exception { + TFServer server = TFServer.createLocalServer(); + System.out.println("Local Server target:" + server.getTarget()); + server.start(); + server.join(); + } +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServer.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServer.java new file mode 100644 index 0000000000000..75f79ad2dbf81 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServer.java @@ -0,0 +1,107 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.tensorflow.bridge; + +import org.tensorflow.distruntime.ServerDef; +import org.tensorflow.framework.ConfigProto; +import java.io.ByteArrayOutputStream; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class TFServer { + public ServerDef serverDef; + private long nativeServer; + + static { + System.loadLibrary("bridge"); // Load native library at runtime + } + + public static ServerDef makeServerDef(ServerDef serverDef, String jobName, + int taskIndex, String proto, ConfigProto config) { + return ServerDef.newBuilder().mergeFrom(serverDef).setJobName(jobName) + .setTaskIndex(taskIndex).setProtocol(proto).setDefaultSessionConfig(config).build(); + } + + public static ServerDef makeServerDef(ClusterSpec clusterSpec, String jobName, + int taskIndex, String proto, ConfigProto config) { + return ServerDef.newBuilder().setCluster(clusterSpec.as_cluster_def()) + .setJobName(jobName).setProtocol(proto).setTaskIndex(taskIndex) + .setDefaultSessionConfig(config).build(); + } + + public TFServer(ClusterSpec clusterSpec, String jobName, int taskIndex, + String proto, ConfigProto config) throws TFServerException { + this.serverDef = makeServerDef(clusterSpec, jobName, taskIndex, proto, config); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try { + serverDef.writeTo(baos); + byte[] bytes = baos.toByteArray(); + baos.close(); + this.nativeServer = createServer(bytes); + } catch (TFServerException e) { + throw e; + } catch (IOException e) { + // + } + } + + public TFServer(Map> clusterSpec, String jobName, int taskIndex) + throws TFServerException { + this(new ClusterSpec(clusterSpec), jobName, taskIndex, + "grpc", ConfigProto.getDefaultInstance()); + } + + public void start() { + this.startServer(this.nativeServer); + } + + public void join() { + 
this.join(this.nativeServer); + } + + public void stop() { + this.stop(this.nativeServer); + } + + public String getTarget() { + return target(this.nativeServer); + } + + public static TFServer createLocalServer() { + HashMap> cluster = new HashMap>(); + List address_list = new ArrayList(); + address_list.add("localhost:0"); + cluster.put("worker",address_list); + ClusterSpec cluster_spec = new ClusterSpec(cluster); + return new TFServer(cluster_spec, "worker", 0, "grpc", ConfigProto.getDefaultInstance()); + } + + private native long createServer(byte[] server_def); + + private native void startServer(long server); + + private native void join(long server); + + private native void stop(long server); + + private native String target(long server); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServerException.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServerException.java new file mode 100644 index 0000000000000..7f4f5482a94c8 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/bridge/TFServerException.java @@ -0,0 +1,24 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.tensorflow.bridge; + +public class TFServerException extends RuntimeException { + public TFServerException(String errorMsg) { + super(errorMsg); + } +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDef.java new file mode 100644 index 0000000000000..94a35a76d6497 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDef.java @@ -0,0 +1,812 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +/** + *
+ * Defines a TensorFlow cluster as a set of jobs.
+ * 
+ * + * Protobuf type {@code tensorflow.ClusterDef} + */ +public final class ClusterDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.ClusterDef) + ClusterDefOrBuilder { + // Use ClusterDef.newBuilder() to construct. + private ClusterDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ClusterDef() { + job_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private ClusterDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + job_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + job_.add( + input.readMessage(org.tensorflow.distruntime.JobDef.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + job_ = java.util.Collections.unmodifiableList(job_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ClusterDef_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ClusterDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.distruntime.ClusterDef.class, org.tensorflow.distruntime.ClusterDef.Builder.class); + } + + public static final int JOB_FIELD_NUMBER = 1; + private java.util.List job_; + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public java.util.List getJobList() { + return job_; + } + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public java.util.List + getJobOrBuilderList() { + return job_; + } + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public int getJobCount() { + return job_.size(); + } + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDef getJob(int index) { + return job_.get(index); + } + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDefOrBuilder getJobOrBuilder( + int index) { + return job_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < job_.size(); i++) { + output.writeMessage(1, job_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < job_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, job_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.distruntime.ClusterDef)) { + return super.equals(obj); + } + org.tensorflow.distruntime.ClusterDef other = (org.tensorflow.distruntime.ClusterDef) obj; + + boolean result = true; + result = result && getJobList() + .equals(other.getJobList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getJobCount() > 0) { + hash = (37 * hash) + JOB_FIELD_NUMBER; + hash = (53 * hash) + getJobList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.distruntime.ClusterDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.tensorflow.distruntime.ClusterDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.distruntime.ClusterDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.distruntime.ClusterDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.distruntime.ClusterDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.ClusterDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.distruntime.ClusterDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.ClusterDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.distruntime.ClusterDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + 
} + public static org.tensorflow.distruntime.ClusterDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.distruntime.ClusterDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Defines a TensorFlow cluster as a set of jobs.
+   * 
+ * + * Protobuf type {@code tensorflow.ClusterDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.ClusterDef) + org.tensorflow.distruntime.ClusterDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ClusterDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ClusterDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.distruntime.ClusterDef.class, org.tensorflow.distruntime.ClusterDef.Builder.class); + } + + // Construct using org.tensorflow.distruntime.ClusterDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getJobFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (jobBuilder_ == null) { + job_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + jobBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ClusterDef_descriptor; + } + + public org.tensorflow.distruntime.ClusterDef getDefaultInstanceForType() { + return org.tensorflow.distruntime.ClusterDef.getDefaultInstance(); + } + + public org.tensorflow.distruntime.ClusterDef build() { + org.tensorflow.distruntime.ClusterDef result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.distruntime.ClusterDef buildPartial() { + org.tensorflow.distruntime.ClusterDef result = new org.tensorflow.distruntime.ClusterDef(this); + int from_bitField0_ = bitField0_; + if (jobBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + job_ = java.util.Collections.unmodifiableList(job_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.job_ = job_; + } else { + result.job_ = jobBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.distruntime.ClusterDef) { + return mergeFrom((org.tensorflow.distruntime.ClusterDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.distruntime.ClusterDef other) { + if (other == org.tensorflow.distruntime.ClusterDef.getDefaultInstance()) return this; + if (jobBuilder_ == null) { + if (!other.job_.isEmpty()) { + if (job_.isEmpty()) { + job_ = other.job_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + 
ensureJobIsMutable(); + job_.addAll(other.job_); + } + onChanged(); + } + } else { + if (!other.job_.isEmpty()) { + if (jobBuilder_.isEmpty()) { + jobBuilder_.dispose(); + jobBuilder_ = null; + job_ = other.job_; + bitField0_ = (bitField0_ & ~0x00000001); + jobBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getJobFieldBuilder() : null; + } else { + jobBuilder_.addAllMessages(other.job_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.distruntime.ClusterDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.distruntime.ClusterDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List job_ = + java.util.Collections.emptyList(); + private void ensureJobIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + job_ = new java.util.ArrayList(job_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.distruntime.JobDef, org.tensorflow.distruntime.JobDef.Builder, org.tensorflow.distruntime.JobDefOrBuilder> jobBuilder_; + + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public java.util.List getJobList() { + if (jobBuilder_ == null) { + return java.util.Collections.unmodifiableList(job_); + } else { + return jobBuilder_.getMessageList(); + } + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public int getJobCount() { + if (jobBuilder_ == null) { + return job_.size(); + } else { + return jobBuilder_.getCount(); + } + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDef getJob(int index) { + if (jobBuilder_ == null) { + return job_.get(index); + } else { + return jobBuilder_.getMessage(index); + } + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder setJob( + int index, org.tensorflow.distruntime.JobDef value) { + if (jobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureJobIsMutable(); + job_.set(index, value); + onChanged(); + } else { + jobBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder setJob( + int index, org.tensorflow.distruntime.JobDef.Builder builderForValue) { + if (jobBuilder_ == null) { + ensureJobIsMutable(); + job_.set(index, builderForValue.build()); + onChanged(); + } else { + jobBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder addJob(org.tensorflow.distruntime.JobDef value) { + if (jobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureJobIsMutable(); + job_.add(value); + onChanged(); + } else { + jobBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder addJob( + int index, org.tensorflow.distruntime.JobDef value) { + if (jobBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureJobIsMutable(); + job_.add(index, value); + onChanged(); + } else { + jobBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder addJob( + org.tensorflow.distruntime.JobDef.Builder builderForValue) { + if (jobBuilder_ == null) { + ensureJobIsMutable(); + job_.add(builderForValue.build()); + onChanged(); + } else { + jobBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder addJob( + int index, org.tensorflow.distruntime.JobDef.Builder builderForValue) { + if (jobBuilder_ == null) { + ensureJobIsMutable(); + job_.add(index, builderForValue.build()); + onChanged(); + } else { + jobBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder addAllJob( + java.lang.Iterable values) { + if (jobBuilder_ == null) { + ensureJobIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, job_); + onChanged(); + } else { + jobBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder clearJob() { + if (jobBuilder_ == null) { + job_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + jobBuilder_.clear(); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public Builder removeJob(int index) { + if (jobBuilder_ == null) { + ensureJobIsMutable(); + job_.remove(index); + onChanged(); + } else { + jobBuilder_.remove(index); + } + return this; + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDef.Builder getJobBuilder( + int index) { + return getJobFieldBuilder().getBuilder(index); + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDefOrBuilder getJobOrBuilder( + int index) { + if (jobBuilder_ == null) { + return job_.get(index); } else { + return jobBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public java.util.List + getJobOrBuilderList() { + if (jobBuilder_ != null) { + return jobBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(job_); + } + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDef.Builder addJobBuilder() { + return getJobFieldBuilder().addBuilder( + org.tensorflow.distruntime.JobDef.getDefaultInstance()); + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public org.tensorflow.distruntime.JobDef.Builder addJobBuilder( + int index) { + return getJobFieldBuilder().addBuilder( + index, org.tensorflow.distruntime.JobDef.getDefaultInstance()); + } + /** + *
+     * The jobs that comprise the cluster.
+     * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + public java.util.List + getJobBuilderList() { + return getJobFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.distruntime.JobDef, org.tensorflow.distruntime.JobDef.Builder, org.tensorflow.distruntime.JobDefOrBuilder> + getJobFieldBuilder() { + if (jobBuilder_ == null) { + jobBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.distruntime.JobDef, org.tensorflow.distruntime.JobDef.Builder, org.tensorflow.distruntime.JobDefOrBuilder>( + job_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + job_ = null; + } + return jobBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.ClusterDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.ClusterDef) + private static final org.tensorflow.distruntime.ClusterDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.distruntime.ClusterDef(); + } + + public static org.tensorflow.distruntime.ClusterDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ClusterDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClusterDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.distruntime.ClusterDef getDefaultInstanceForType() { 
+ return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDefOrBuilder.java new file mode 100644 index 0000000000000..9afce3302041f --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ClusterDefOrBuilder.java @@ -0,0 +1,53 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +public interface ClusterDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.ClusterDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + java.util.List + getJobList(); + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + org.tensorflow.distruntime.JobDef getJob(int index); + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + int getJobCount(); + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + java.util.List + getJobOrBuilderList(); + /** + *
+   * The jobs that comprise the cluster.
+   * 
+ * + * repeated .tensorflow.JobDef job = 1; + */ + org.tensorflow.distruntime.JobDefOrBuilder getJobOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDef.java new file mode 100644 index 0000000000000..f6f12ff1d8f36 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDef.java @@ -0,0 +1,902 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +/** + *
+ * Defines a single job in a TensorFlow cluster.
+ * 
+ * + * Protobuf type {@code tensorflow.JobDef} + */ +public final class JobDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.JobDef) + JobDefOrBuilder { + // Use JobDef.newBuilder() to construct. + private JobDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private JobDef() { + name_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private JobDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + tasks_ = com.google.protobuf.MapField.newMapField( + TasksDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000002; + } + com.google.protobuf.MapEntry + tasks__ = input.readMessage( + TasksDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + tasks_.getMutableMap().put( + tasks__.getKey(), tasks__.getValue()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_JobDef_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 2: + return internalGetTasks(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_JobDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.distruntime.JobDef.class, org.tensorflow.distruntime.JobDef.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * The name of this job.
+   * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * The name of this job.
+   * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TASKS_FIELD_NUMBER = 2; + private static final class TasksDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.Integer, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_JobDef_TasksEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.INT32, + 0, + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.Integer, java.lang.String> tasks_; + private com.google.protobuf.MapField + internalGetTasks() { + if (tasks_ == null) { + return com.google.protobuf.MapField.emptyMapField( + TasksDefaultEntryHolder.defaultEntry); + } + return tasks_; + } + + public int getTasksCount() { + return internalGetTasks().getMap().size(); + } + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + + public boolean containsTasks( + int key) { + + return internalGetTasks().getMap().containsKey(key); + } + /** + * Use {@link #getTasksMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getTasks() { + return getTasksMap(); + } + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + + public java.util.Map getTasksMap() { + return internalGetTasks().getMap(); + } + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + + public java.lang.String getTasksOrDefault( + int key, + java.lang.String defaultValue) { + + java.util.Map map = + internalGetTasks().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + + public java.lang.String getTasksOrThrow( + int key) { + + java.util.Map map = + internalGetTasks().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeIntegerMapTo( + output, + internalGetTasks(), + TasksDefaultEntryHolder.defaultEntry, + 2); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + for (java.util.Map.Entry entry + : internalGetTasks().getMap().entrySet()) { + com.google.protobuf.MapEntry + tasks__ = TasksDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, tasks__); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.distruntime.JobDef)) { + return super.equals(obj); + } + org.tensorflow.distruntime.JobDef other = (org.tensorflow.distruntime.JobDef) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && internalGetTasks().equals( + 
other.internalGetTasks()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (!internalGetTasks().getMap().isEmpty()) { + hash = (37 * hash) + TASKS_FIELD_NUMBER; + hash = (53 * hash) + internalGetTasks().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.distruntime.JobDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.distruntime.JobDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.distruntime.JobDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.distruntime.JobDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.distruntime.JobDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.JobDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static 
org.tensorflow.distruntime.JobDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.JobDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.distruntime.JobDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.JobDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.distruntime.JobDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Defines a single job in a TensorFlow cluster.
+   * 
+ * + * Protobuf type {@code tensorflow.JobDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.JobDef) + org.tensorflow.distruntime.JobDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_JobDef_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 2: + return internalGetTasks(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 2: + return internalGetMutableTasks(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_JobDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.distruntime.JobDef.class, org.tensorflow.distruntime.JobDef.Builder.class); + } + + // Construct using org.tensorflow.distruntime.JobDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + internalGetMutableTasks().clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_JobDef_descriptor; + } + + public org.tensorflow.distruntime.JobDef getDefaultInstanceForType() { + return org.tensorflow.distruntime.JobDef.getDefaultInstance(); + } + + public org.tensorflow.distruntime.JobDef build() { + org.tensorflow.distruntime.JobDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.distruntime.JobDef buildPartial() { + org.tensorflow.distruntime.JobDef result = new org.tensorflow.distruntime.JobDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + result.tasks_ = internalGetTasks(); + result.tasks_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.distruntime.JobDef) { + return mergeFrom((org.tensorflow.distruntime.JobDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.distruntime.JobDef 
other) { + if (other == org.tensorflow.distruntime.JobDef.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + internalGetMutableTasks().mergeFrom( + other.internalGetTasks()); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.distruntime.JobDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.distruntime.JobDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+     * The name of this job.
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The name of this job.
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The name of this job.
+     * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * The name of this job.
+     * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * The name of this job.
+     * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.Integer, java.lang.String> tasks_; + private com.google.protobuf.MapField + internalGetTasks() { + if (tasks_ == null) { + return com.google.protobuf.MapField.emptyMapField( + TasksDefaultEntryHolder.defaultEntry); + } + return tasks_; + } + private com.google.protobuf.MapField + internalGetMutableTasks() { + onChanged();; + if (tasks_ == null) { + tasks_ = com.google.protobuf.MapField.newMapField( + TasksDefaultEntryHolder.defaultEntry); + } + if (!tasks_.isMutable()) { + tasks_ = tasks_.copy(); + } + return tasks_; + } + + public int getTasksCount() { + return internalGetTasks().getMap().size(); + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + + public boolean containsTasks( + int key) { + + return internalGetTasks().getMap().containsKey(key); + } + /** + * Use {@link #getTasksMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getTasks() { + return getTasksMap(); + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + + public java.util.Map getTasksMap() { + return internalGetTasks().getMap(); + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + + public java.lang.String getTasksOrDefault( + int key, + java.lang.String defaultValue) { + + java.util.Map map = + internalGetTasks().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + + public java.lang.String getTasksOrThrow( + int key) { + + java.util.Map map = + internalGetTasks().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearTasks() { + getMutableTasks().clear(); + return this; + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + + public Builder removeTasks( + int key) { + + getMutableTasks().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableTasks() { + return internalGetMutableTasks().getMutableMap(); + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + public Builder putTasks( + int key, + java.lang.String value) { + + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableTasks().put(key, value); + return this; + } + /** + *
+     * Mapping from task ID to "hostname:port" string.
+     * If the `name` field contains "worker", and the `tasks` map contains a
+     * mapping from 7 to "example.org:2222", then the device prefix
+     * "/job:worker/task:7" will be assigned to "example.org:2222".
+     * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+     * supported.
+     * 
+ * + * map<int32, string> tasks = 2; + */ + + public Builder putAllTasks( + java.util.Map values) { + getMutableTasks().putAll(values); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.JobDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.JobDef) + private static final org.tensorflow.distruntime.JobDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.distruntime.JobDef(); + } + + public static org.tensorflow.distruntime.JobDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public JobDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new JobDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.distruntime.JobDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDefOrBuilder.java new file mode 100644 index 0000000000000..5966807548ea6 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/JobDefOrBuilder.java @@ -0,0 +1,106 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +public interface JobDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.JobDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * The name of this job.
+   * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+   * The name of this job.
+   * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + int getTasksCount(); + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + boolean containsTasks( + int key); + /** + * Use {@link #getTasksMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getTasks(); + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + java.util.Map + getTasksMap(); + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + + java.lang.String getTasksOrDefault( + int key, + java.lang.String defaultValue); + /** + *
+   * Mapping from task ID to "hostname:port" string.
+   * If the `name` field contains "worker", and the `tasks` map contains a
+   * mapping from 7 to "example.org:2222", then the device prefix
+   * "/job:worker/task:7" will be assigned to "example.org:2222".
+   * NOTE(mrry): Currently, only a dense task ID space starting at 0 is
+   * supported.
+   * 
+ * + * map<int32, string> tasks = 2; + */ + + java.lang.String getTasksOrThrow( + int key); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDef.java new file mode 100644 index 0000000000000..4fbf962ecc9b0 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDef.java @@ -0,0 +1,1246 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +/** + *
+ * Defines the configuration of a single TensorFlow server.
+ * 
+ * + * Protobuf type {@code tensorflow.ServerDef} + */ +public final class ServerDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.ServerDef) + ServerDefOrBuilder { + // Use ServerDef.newBuilder() to construct. + private ServerDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ServerDef() { + jobName_ = ""; + taskIndex_ = 0; + protocol_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private ServerDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + org.tensorflow.distruntime.ClusterDef.Builder subBuilder = null; + if (cluster_ != null) { + subBuilder = cluster_.toBuilder(); + } + cluster_ = input.readMessage(org.tensorflow.distruntime.ClusterDef.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(cluster_); + cluster_ = subBuilder.buildPartial(); + } + + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + jobName_ = s; + break; + } + case 24: { + + taskIndex_ = input.readInt32(); + break; + } + case 34: { + org.tensorflow.framework.ConfigProto.Builder subBuilder = null; + if (defaultSessionConfig_ != null) { + subBuilder = defaultSessionConfig_.toBuilder(); + } + defaultSessionConfig_ = input.readMessage(org.tensorflow.framework.ConfigProto.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(defaultSessionConfig_); + defaultSessionConfig_ = 
subBuilder.buildPartial(); + } + + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + protocol_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ServerDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ServerDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.distruntime.ServerDef.class, org.tensorflow.distruntime.ServerDef.Builder.class); + } + + public static final int CLUSTER_FIELD_NUMBER = 1; + private org.tensorflow.distruntime.ClusterDef cluster_; + /** + *
+   * The cluster of which this server is a member.
+   * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public boolean hasCluster() { + return cluster_ != null; + } + /** + *
+   * The cluster of which this server is a member.
+   * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public org.tensorflow.distruntime.ClusterDef getCluster() { + return cluster_ == null ? org.tensorflow.distruntime.ClusterDef.getDefaultInstance() : cluster_; + } + /** + *
+   * The cluster of which this server is a member.
+   * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public org.tensorflow.distruntime.ClusterDefOrBuilder getClusterOrBuilder() { + return getCluster(); + } + + public static final int JOB_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object jobName_; + /** + *
+   * The name of the job of which this server is a member.
+   * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+   * that matches this name.
+   * 
+ * + * optional string job_name = 2; + */ + public java.lang.String getJobName() { + java.lang.Object ref = jobName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobName_ = s; + return s; + } + } + /** + *
+   * The name of the job of which this server is a member.
+   * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+   * that matches this name.
+   * 
+ * + * optional string job_name = 2; + */ + public com.google.protobuf.ByteString + getJobNameBytes() { + java.lang.Object ref = jobName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TASK_INDEX_FIELD_NUMBER = 3; + private int taskIndex_; + /** + *
+   * The task index of this server in its job.
+   * NOTE: The `cluster` field must contain a `JobDef` with a matching `name`
+   * and a mapping in its `tasks` field for this index.
+   * 
+ * + * optional int32 task_index = 3; + */ + public int getTaskIndex() { + return taskIndex_; + } + + public static final int DEFAULT_SESSION_CONFIG_FIELD_NUMBER = 4; + private org.tensorflow.framework.ConfigProto defaultSessionConfig_; + /** + *
+   * The default configuration for sessions that run on this server.
+   * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public boolean hasDefaultSessionConfig() { + return defaultSessionConfig_ != null; + } + /** + *
+   * The default configuration for sessions that run on this server.
+   * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public org.tensorflow.framework.ConfigProto getDefaultSessionConfig() { + return defaultSessionConfig_ == null ? org.tensorflow.framework.ConfigProto.getDefaultInstance() : defaultSessionConfig_; + } + /** + *
+   * The default configuration for sessions that run on this server.
+   * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public org.tensorflow.framework.ConfigProtoOrBuilder getDefaultSessionConfigOrBuilder() { + return getDefaultSessionConfig(); + } + + public static final int PROTOCOL_FIELD_NUMBER = 5; + private volatile java.lang.Object protocol_; + /** + *
+   * The protocol to be used by this server.
+   * Acceptable values include: "grpc".
+   * 
+ * + * optional string protocol = 5; + */ + public java.lang.String getProtocol() { + java.lang.Object ref = protocol_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + protocol_ = s; + return s; + } + } + /** + *
+   * The protocol to be used by this server.
+   * Acceptable values include: "grpc".
+   * 
+ * + * optional string protocol = 5; + */ + public com.google.protobuf.ByteString + getProtocolBytes() { + java.lang.Object ref = protocol_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + protocol_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (cluster_ != null) { + output.writeMessage(1, getCluster()); + } + if (!getJobNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, jobName_); + } + if (taskIndex_ != 0) { + output.writeInt32(3, taskIndex_); + } + if (defaultSessionConfig_ != null) { + output.writeMessage(4, getDefaultSessionConfig()); + } + if (!getProtocolBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, protocol_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (cluster_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getCluster()); + } + if (!getJobNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, jobName_); + } + if (taskIndex_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, taskIndex_); + } + if (defaultSessionConfig_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getDefaultSessionConfig()); + } + if (!getProtocolBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, protocol_); + } + memoizedSize = size; + return size; + } + + 
private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.distruntime.ServerDef)) { + return super.equals(obj); + } + org.tensorflow.distruntime.ServerDef other = (org.tensorflow.distruntime.ServerDef) obj; + + boolean result = true; + result = result && (hasCluster() == other.hasCluster()); + if (hasCluster()) { + result = result && getCluster() + .equals(other.getCluster()); + } + result = result && getJobName() + .equals(other.getJobName()); + result = result && (getTaskIndex() + == other.getTaskIndex()); + result = result && (hasDefaultSessionConfig() == other.hasDefaultSessionConfig()); + if (hasDefaultSessionConfig()) { + result = result && getDefaultSessionConfig() + .equals(other.getDefaultSessionConfig()); + } + result = result && getProtocol() + .equals(other.getProtocol()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCluster()) { + hash = (37 * hash) + CLUSTER_FIELD_NUMBER; + hash = (53 * hash) + getCluster().hashCode(); + } + hash = (37 * hash) + JOB_NAME_FIELD_NUMBER; + hash = (53 * hash) + getJobName().hashCode(); + hash = (37 * hash) + TASK_INDEX_FIELD_NUMBER; + hash = (53 * hash) + getTaskIndex(); + if (hasDefaultSessionConfig()) { + hash = (37 * hash) + DEFAULT_SESSION_CONFIG_FIELD_NUMBER; + hash = (53 * hash) + getDefaultSessionConfig().hashCode(); + } + hash = (37 * hash) + PROTOCOL_FIELD_NUMBER; + hash = (53 * hash) + getProtocol().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.distruntime.ServerDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); 
+ } + public static org.tensorflow.distruntime.ServerDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.distruntime.ServerDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.distruntime.ServerDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.distruntime.ServerDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.ServerDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.distruntime.ServerDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.ServerDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.distruntime.ServerDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.tensorflow.distruntime.ServerDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.distruntime.ServerDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Defines the configuration of a single TensorFlow server.
+   * 
+ * + * Protobuf type {@code tensorflow.ServerDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.ServerDef) + org.tensorflow.distruntime.ServerDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ServerDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ServerDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.distruntime.ServerDef.class, org.tensorflow.distruntime.ServerDef.Builder.class); + } + + // Construct using org.tensorflow.distruntime.ServerDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + if (clusterBuilder_ == null) { + cluster_ = null; + } else { + cluster_ = null; + clusterBuilder_ = null; + } + jobName_ = ""; + + taskIndex_ = 0; + + if (defaultSessionConfigBuilder_ == null) { + defaultSessionConfig_ = null; + } else { + defaultSessionConfig_ = null; + defaultSessionConfigBuilder_ = null; + } + protocol_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.distruntime.ServerProtos.internal_static_tensorflow_ServerDef_descriptor; + } + + public org.tensorflow.distruntime.ServerDef getDefaultInstanceForType() { + return org.tensorflow.distruntime.ServerDef.getDefaultInstance(); + } + + public 
org.tensorflow.distruntime.ServerDef build() { + org.tensorflow.distruntime.ServerDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.distruntime.ServerDef buildPartial() { + org.tensorflow.distruntime.ServerDef result = new org.tensorflow.distruntime.ServerDef(this); + if (clusterBuilder_ == null) { + result.cluster_ = cluster_; + } else { + result.cluster_ = clusterBuilder_.build(); + } + result.jobName_ = jobName_; + result.taskIndex_ = taskIndex_; + if (defaultSessionConfigBuilder_ == null) { + result.defaultSessionConfig_ = defaultSessionConfig_; + } else { + result.defaultSessionConfig_ = defaultSessionConfigBuilder_.build(); + } + result.protocol_ = protocol_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.distruntime.ServerDef) { + return mergeFrom((org.tensorflow.distruntime.ServerDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.distruntime.ServerDef other) 
{ + if (other == org.tensorflow.distruntime.ServerDef.getDefaultInstance()) return this; + if (other.hasCluster()) { + mergeCluster(other.getCluster()); + } + if (!other.getJobName().isEmpty()) { + jobName_ = other.jobName_; + onChanged(); + } + if (other.getTaskIndex() != 0) { + setTaskIndex(other.getTaskIndex()); + } + if (other.hasDefaultSessionConfig()) { + mergeDefaultSessionConfig(other.getDefaultSessionConfig()); + } + if (!other.getProtocol().isEmpty()) { + protocol_ = other.protocol_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.distruntime.ServerDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.distruntime.ServerDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private org.tensorflow.distruntime.ClusterDef cluster_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.distruntime.ClusterDef, org.tensorflow.distruntime.ClusterDef.Builder, org.tensorflow.distruntime.ClusterDefOrBuilder> clusterBuilder_; + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public boolean hasCluster() { + return clusterBuilder_ != null || cluster_ != null; + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public org.tensorflow.distruntime.ClusterDef getCluster() { + if (clusterBuilder_ == null) { + return cluster_ == null ? org.tensorflow.distruntime.ClusterDef.getDefaultInstance() : cluster_; + } else { + return clusterBuilder_.getMessage(); + } + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public Builder setCluster(org.tensorflow.distruntime.ClusterDef value) { + if (clusterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + cluster_ = value; + onChanged(); + } else { + clusterBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public Builder setCluster( + org.tensorflow.distruntime.ClusterDef.Builder builderForValue) { + if (clusterBuilder_ == null) { + cluster_ = builderForValue.build(); + onChanged(); + } else { + clusterBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public Builder mergeCluster(org.tensorflow.distruntime.ClusterDef value) { + if (clusterBuilder_ == null) { + if (cluster_ != null) { + cluster_ = + org.tensorflow.distruntime.ClusterDef.newBuilder(cluster_).mergeFrom(value).buildPartial(); + } else { + cluster_ = value; + } + onChanged(); + } else { + clusterBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public Builder clearCluster() { + if (clusterBuilder_ == null) { + cluster_ = null; + onChanged(); + } else { + cluster_ = null; + clusterBuilder_ = null; + } + + return this; + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public org.tensorflow.distruntime.ClusterDef.Builder getClusterBuilder() { + + onChanged(); + return getClusterFieldBuilder().getBuilder(); + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + public org.tensorflow.distruntime.ClusterDefOrBuilder getClusterOrBuilder() { + if (clusterBuilder_ != null) { + return clusterBuilder_.getMessageOrBuilder(); + } else { + return cluster_ == null ? + org.tensorflow.distruntime.ClusterDef.getDefaultInstance() : cluster_; + } + } + /** + *
+     * The cluster of which this server is a member.
+     * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.distruntime.ClusterDef, org.tensorflow.distruntime.ClusterDef.Builder, org.tensorflow.distruntime.ClusterDefOrBuilder> + getClusterFieldBuilder() { + if (clusterBuilder_ == null) { + clusterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.distruntime.ClusterDef, org.tensorflow.distruntime.ClusterDef.Builder, org.tensorflow.distruntime.ClusterDefOrBuilder>( + getCluster(), + getParentForChildren(), + isClean()); + cluster_ = null; + } + return clusterBuilder_; + } + + private java.lang.Object jobName_ = ""; + /** + *
+     * The name of the job of which this server is a member.
+     * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+     * that matches this name.
+     * 
+ * + * optional string job_name = 2; + */ + public java.lang.String getJobName() { + java.lang.Object ref = jobName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + jobName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The name of the job of which this server is a member.
+     * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+     * that matches this name.
+     * 
+ * + * optional string job_name = 2; + */ + public com.google.protobuf.ByteString + getJobNameBytes() { + java.lang.Object ref = jobName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + jobName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The name of the job of which this server is a member.
+     * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+     * that matches this name.
+     * 
+ * + * optional string job_name = 2; + */ + public Builder setJobName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + jobName_ = value; + onChanged(); + return this; + } + /** + *
+     * The name of the job of which this server is a member.
+     * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+     * that matches this name.
+     * 
+ * + * optional string job_name = 2; + */ + public Builder clearJobName() { + + jobName_ = getDefaultInstance().getJobName(); + onChanged(); + return this; + } + /** + *
+     * The name of the job of which this server is a member.
+     * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+     * that matches this name.
+     * 
+ * + * optional string job_name = 2; + */ + public Builder setJobNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + jobName_ = value; + onChanged(); + return this; + } + + private int taskIndex_ ; + /** + *
+     * The task index of this server in its job.
+     * NOTE: The `cluster` field must contain a `JobDef` with a matching `name`
+     * and a mapping in its `tasks` field for this index.
+     * 
+ * + * optional int32 task_index = 3; + */ + public int getTaskIndex() { + return taskIndex_; + } + /** + *
+     * The task index of this server in its job.
+     * NOTE: The `cluster` field must contain a `JobDef` with a matching `name`
+     * and a mapping in its `tasks` field for this index.
+     * 
+ * + * optional int32 task_index = 3; + */ + public Builder setTaskIndex(int value) { + + taskIndex_ = value; + onChanged(); + return this; + } + /** + *
+     * The task index of this server in its job.
+     * NOTE: The `cluster` field must contain a `JobDef` with a matching `name`
+     * and a mapping in its `tasks` field for this index.
+     * 
+ * + * optional int32 task_index = 3; + */ + public Builder clearTaskIndex() { + + taskIndex_ = 0; + onChanged(); + return this; + } + + private org.tensorflow.framework.ConfigProto defaultSessionConfig_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.ConfigProto, org.tensorflow.framework.ConfigProto.Builder, org.tensorflow.framework.ConfigProtoOrBuilder> defaultSessionConfigBuilder_; + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public boolean hasDefaultSessionConfig() { + return defaultSessionConfigBuilder_ != null || defaultSessionConfig_ != null; + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public org.tensorflow.framework.ConfigProto getDefaultSessionConfig() { + if (defaultSessionConfigBuilder_ == null) { + return defaultSessionConfig_ == null ? org.tensorflow.framework.ConfigProto.getDefaultInstance() : defaultSessionConfig_; + } else { + return defaultSessionConfigBuilder_.getMessage(); + } + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public Builder setDefaultSessionConfig(org.tensorflow.framework.ConfigProto value) { + if (defaultSessionConfigBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + defaultSessionConfig_ = value; + onChanged(); + } else { + defaultSessionConfigBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public Builder setDefaultSessionConfig( + org.tensorflow.framework.ConfigProto.Builder builderForValue) { + if (defaultSessionConfigBuilder_ == null) { + defaultSessionConfig_ = builderForValue.build(); + onChanged(); + } else { + defaultSessionConfigBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public Builder mergeDefaultSessionConfig(org.tensorflow.framework.ConfigProto value) { + if (defaultSessionConfigBuilder_ == null) { + if (defaultSessionConfig_ != null) { + defaultSessionConfig_ = + org.tensorflow.framework.ConfigProto.newBuilder(defaultSessionConfig_).mergeFrom(value).buildPartial(); + } else { + defaultSessionConfig_ = value; + } + onChanged(); + } else { + defaultSessionConfigBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public Builder clearDefaultSessionConfig() { + if (defaultSessionConfigBuilder_ == null) { + defaultSessionConfig_ = null; + onChanged(); + } else { + defaultSessionConfig_ = null; + defaultSessionConfigBuilder_ = null; + } + + return this; + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public org.tensorflow.framework.ConfigProto.Builder getDefaultSessionConfigBuilder() { + + onChanged(); + return getDefaultSessionConfigFieldBuilder().getBuilder(); + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + public org.tensorflow.framework.ConfigProtoOrBuilder getDefaultSessionConfigOrBuilder() { + if (defaultSessionConfigBuilder_ != null) { + return defaultSessionConfigBuilder_.getMessageOrBuilder(); + } else { + return defaultSessionConfig_ == null ? + org.tensorflow.framework.ConfigProto.getDefaultInstance() : defaultSessionConfig_; + } + } + /** + *
+     * The default configuration for sessions that run on this server.
+     * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.ConfigProto, org.tensorflow.framework.ConfigProto.Builder, org.tensorflow.framework.ConfigProtoOrBuilder> + getDefaultSessionConfigFieldBuilder() { + if (defaultSessionConfigBuilder_ == null) { + defaultSessionConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.ConfigProto, org.tensorflow.framework.ConfigProto.Builder, org.tensorflow.framework.ConfigProtoOrBuilder>( + getDefaultSessionConfig(), + getParentForChildren(), + isClean()); + defaultSessionConfig_ = null; + } + return defaultSessionConfigBuilder_; + } + + private java.lang.Object protocol_ = ""; + /** + *
+     * The protocol to be used by this server.
+     * Acceptable values include: "grpc".
+     * 
+ * + * optional string protocol = 5; + */ + public java.lang.String getProtocol() { + java.lang.Object ref = protocol_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + protocol_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The protocol to be used by this server.
+     * Acceptable values include: "grpc".
+     * 
+ * + * optional string protocol = 5; + */ + public com.google.protobuf.ByteString + getProtocolBytes() { + java.lang.Object ref = protocol_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + protocol_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The protocol to be used by this server.
+     * Acceptable values include: "grpc".
+     * 
+ * + * optional string protocol = 5; + */ + public Builder setProtocol( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + protocol_ = value; + onChanged(); + return this; + } + /** + *
+     * The protocol to be used by this server.
+     * Acceptable values include: "grpc".
+     * 
+ * + * optional string protocol = 5; + */ + public Builder clearProtocol() { + + protocol_ = getDefaultInstance().getProtocol(); + onChanged(); + return this; + } + /** + *
+     * The protocol to be used by this server.
+     * Acceptable values include: "grpc".
+     * 
+ * + * optional string protocol = 5; + */ + public Builder setProtocolBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + protocol_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.ServerDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.ServerDef) + private static final org.tensorflow.distruntime.ServerDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.distruntime.ServerDef(); + } + + public static org.tensorflow.distruntime.ServerDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ServerDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ServerDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.distruntime.ServerDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDefOrBuilder.java new file mode 100644 index 0000000000000..eb40dc49285c5 --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerDefOrBuilder.java @@ -0,0 +1,112 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +public interface ServerDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.ServerDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * The cluster of which this server is a member.
+   * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + boolean hasCluster(); + /** + *
+   * The cluster of which this server is a member.
+   * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + org.tensorflow.distruntime.ClusterDef getCluster(); + /** + *
+   * The cluster of which this server is a member.
+   * 
+ * + * optional .tensorflow.ClusterDef cluster = 1; + */ + org.tensorflow.distruntime.ClusterDefOrBuilder getClusterOrBuilder(); + + /** + *
+   * The name of the job of which this server is a member.
+   * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+   * that matches this name.
+   * 
+ * + * optional string job_name = 2; + */ + java.lang.String getJobName(); + /** + *
+   * The name of the job of which this server is a member.
+   * NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
+   * that matches this name.
+   * 
+ * + * optional string job_name = 2; + */ + com.google.protobuf.ByteString + getJobNameBytes(); + + /** + *
+   * The task index of this server in its job.
+   * NOTE: The `cluster` field must contain a `JobDef` with a matching `name`
+   * and a mapping in its `tasks` field for this index.
+   * 
+ * + * optional int32 task_index = 3; + */ + int getTaskIndex(); + + /** + *
+   * The default configuration for sessions that run on this server.
+   * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + boolean hasDefaultSessionConfig(); + /** + *
+   * The default configuration for sessions that run on this server.
+   * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + org.tensorflow.framework.ConfigProto getDefaultSessionConfig(); + /** + *
+   * The default configuration for sessions that run on this server.
+   * 
+ * + * optional .tensorflow.ConfigProto default_session_config = 4; + */ + org.tensorflow.framework.ConfigProtoOrBuilder getDefaultSessionConfigOrBuilder(); + + /** + *
+   * The protocol to be used by this server.
+   * Acceptable values include: "grpc".
+   * 
+ * + * optional string protocol = 5; + */ + java.lang.String getProtocol(); + /** + *
+   * The protocol to be used by this server.
+   * Acceptable values include: "grpc".
+   * 
+ * + * optional string protocol = 5; + */ + com.google.protobuf.ByteString + getProtocolBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerProtos.java new file mode 100644 index 0000000000000..03593fd0ebdd0 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/distruntime/ServerProtos.java @@ -0,0 +1,101 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/tensorflow_server.proto + +package org.tensorflow.distruntime; + +public final class ServerProtos { + private ServerProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_JobDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_JobDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_JobDef_TasksEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_JobDef_TasksEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_ClusterDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_ClusterDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_ServerDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_tensorflow_ServerDef_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n0tensorflow/core/protobuf/tensorflow_se" + + "rver.proto\022\ntensorflow\032%tensorflow/core/" + + "protobuf/config.proto\"r\n\006JobDef\022\014\n\004name\030" + + "\001 \001(\t\022,\n\005tasks\030\002 \003(\0132\035.tensorflow.JobDef" + + ".TasksEntry\032,\n\nTasksEntry\022\013\n\003key\030\001 \001(\005\022\r" + + "\n\005value\030\002 \001(\t:\0028\001\"-\n\nClusterDef\022\037\n\003job\030\001" + + " \003(\0132\022.tensorflow.JobDef\"\245\001\n\tServerDef\022\'" + + "\n\007cluster\030\001 \001(\0132\026.tensorflow.ClusterDef\022" + + "\020\n\010job_name\030\002 \001(\t\022\022\n\ntask_index\030\003 \001(\005\0227\n" + + "\026default_session_config\030\004 \001(\0132\027.tensorfl", + "ow.ConfigProto\022\020\n\010protocol\030\005 \001(\tB/\n\032org." + + "tensorflow.distruntimeB\014ServerProtosP\001\370\001" + + "\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.ConfigProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_JobDef_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_JobDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_JobDef_descriptor, + new java.lang.String[] { "Name", "Tasks", }); + internal_static_tensorflow_JobDef_TasksEntry_descriptor = + internal_static_tensorflow_JobDef_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_JobDef_TasksEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_JobDef_TasksEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_tensorflow_ClusterDef_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_ClusterDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_ClusterDef_descriptor, + new java.lang.String[] { "Job", }); + internal_static_tensorflow_ServerDef_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_tensorflow_ServerDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_ServerDef_descriptor, + new java.lang.String[] { "Cluster", "JobName", "TaskIndex", "DefaultSessionConfig", "Protocol", }); + org.tensorflow.framework.ConfigProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git 
a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescription.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescription.java new file mode 100644 index 0000000000000..c201e3351c5e1 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescription.java @@ -0,0 +1,897 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/allocation_description.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.AllocationDescription} + */ +public final class AllocationDescription extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.AllocationDescription) + AllocationDescriptionOrBuilder { + // Use AllocationDescription.newBuilder() to construct. + private AllocationDescription(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AllocationDescription() { + requestedBytes_ = 0L; + allocatedBytes_ = 0L; + allocatorName_ = ""; + allocationId_ = 0L; + hasSingleReference_ = false; + ptr_ = 0L; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private AllocationDescription( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + requestedBytes_ = input.readInt64(); + break; + } + case 16: { + + allocatedBytes_ = input.readInt64(); + break; + } + case 26: 
{ + java.lang.String s = input.readStringRequireUtf8(); + + allocatorName_ = s; + break; + } + case 32: { + + allocationId_ = input.readInt64(); + break; + } + case 40: { + + hasSingleReference_ = input.readBool(); + break; + } + case 48: { + + ptr_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AllocationDescriptionProtos.internal_static_tensorflow_AllocationDescription_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AllocationDescriptionProtos.internal_static_tensorflow_AllocationDescription_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AllocationDescription.class, org.tensorflow.framework.AllocationDescription.Builder.class); + } + + public static final int REQUESTED_BYTES_FIELD_NUMBER = 1; + private long requestedBytes_; + /** + *
+   * Total number of bytes requested
+   * 
+ * + * optional int64 requested_bytes = 1; + */ + public long getRequestedBytes() { + return requestedBytes_; + } + + public static final int ALLOCATED_BYTES_FIELD_NUMBER = 2; + private long allocatedBytes_; + /** + *
+   * Total number of bytes allocated if known
+   * 
+ * + * optional int64 allocated_bytes = 2; + */ + public long getAllocatedBytes() { + return allocatedBytes_; + } + + public static final int ALLOCATOR_NAME_FIELD_NUMBER = 3; + private volatile java.lang.Object allocatorName_; + /** + *
+   * Name of the allocator used
+   * 
+ * + * optional string allocator_name = 3; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } + } + /** + *
+   * Name of the allocator used
+   * 
+ * + * optional string allocator_name = 3; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ALLOCATION_ID_FIELD_NUMBER = 4; + private long allocationId_; + /** + *
+   * Identifier of the allocated buffer if known
+   * 
+ * + * optional int64 allocation_id = 4; + */ + public long getAllocationId() { + return allocationId_; + } + + public static final int HAS_SINGLE_REFERENCE_FIELD_NUMBER = 5; + private boolean hasSingleReference_; + /** + *
+   * Set if this tensor only has one remaining reference
+   * 
+ * + * optional bool has_single_reference = 5; + */ + public boolean getHasSingleReference() { + return hasSingleReference_; + } + + public static final int PTR_FIELD_NUMBER = 6; + private long ptr_; + /** + *
+   * Address of the allocation.
+   * 
+ * + * optional uint64 ptr = 6; + */ + public long getPtr() { + return ptr_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (requestedBytes_ != 0L) { + output.writeInt64(1, requestedBytes_); + } + if (allocatedBytes_ != 0L) { + output.writeInt64(2, allocatedBytes_); + } + if (!getAllocatorNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, allocatorName_); + } + if (allocationId_ != 0L) { + output.writeInt64(4, allocationId_); + } + if (hasSingleReference_ != false) { + output.writeBool(5, hasSingleReference_); + } + if (ptr_ != 0L) { + output.writeUInt64(6, ptr_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (requestedBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, requestedBytes_); + } + if (allocatedBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, allocatedBytes_); + } + if (!getAllocatorNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, allocatorName_); + } + if (allocationId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(4, allocationId_); + } + if (hasSingleReference_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, hasSingleReference_); + } + if (ptr_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(6, ptr_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + 
} + if (!(obj instanceof org.tensorflow.framework.AllocationDescription)) { + return super.equals(obj); + } + org.tensorflow.framework.AllocationDescription other = (org.tensorflow.framework.AllocationDescription) obj; + + boolean result = true; + result = result && (getRequestedBytes() + == other.getRequestedBytes()); + result = result && (getAllocatedBytes() + == other.getAllocatedBytes()); + result = result && getAllocatorName() + .equals(other.getAllocatorName()); + result = result && (getAllocationId() + == other.getAllocationId()); + result = result && (getHasSingleReference() + == other.getHasSingleReference()); + result = result && (getPtr() + == other.getPtr()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + REQUESTED_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getRequestedBytes()); + hash = (37 * hash) + ALLOCATED_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllocatedBytes()); + hash = (37 * hash) + ALLOCATOR_NAME_FIELD_NUMBER; + hash = (53 * hash) + getAllocatorName().hashCode(); + hash = (37 * hash) + ALLOCATION_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllocationId()); + hash = (37 * hash) + HAS_SINGLE_REFERENCE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getHasSingleReference()); + hash = (37 * hash) + PTR_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getPtr()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.AllocationDescription parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.tensorflow.framework.AllocationDescription parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AllocationDescription parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AllocationDescription parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AllocationDescription parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AllocationDescription parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AllocationDescription parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AllocationDescription parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AllocationDescription parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AllocationDescription parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.AllocationDescription prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.AllocationDescription} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.AllocationDescription) + org.tensorflow.framework.AllocationDescriptionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AllocationDescriptionProtos.internal_static_tensorflow_AllocationDescription_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AllocationDescriptionProtos.internal_static_tensorflow_AllocationDescription_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AllocationDescription.class, org.tensorflow.framework.AllocationDescription.Builder.class); + } + + // Construct using 
org.tensorflow.framework.AllocationDescription.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + requestedBytes_ = 0L; + + allocatedBytes_ = 0L; + + allocatorName_ = ""; + + allocationId_ = 0L; + + hasSingleReference_ = false; + + ptr_ = 0L; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.AllocationDescriptionProtos.internal_static_tensorflow_AllocationDescription_descriptor; + } + + public org.tensorflow.framework.AllocationDescription getDefaultInstanceForType() { + return org.tensorflow.framework.AllocationDescription.getDefaultInstance(); + } + + public org.tensorflow.framework.AllocationDescription build() { + org.tensorflow.framework.AllocationDescription result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.AllocationDescription buildPartial() { + org.tensorflow.framework.AllocationDescription result = new org.tensorflow.framework.AllocationDescription(this); + result.requestedBytes_ = requestedBytes_; + result.allocatedBytes_ = allocatedBytes_; + result.allocatorName_ = allocatorName_; + result.allocationId_ = allocationId_; + result.hasSingleReference_ = hasSingleReference_; + result.ptr_ = ptr_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + 
com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.AllocationDescription) { + return mergeFrom((org.tensorflow.framework.AllocationDescription)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.AllocationDescription other) { + if (other == org.tensorflow.framework.AllocationDescription.getDefaultInstance()) return this; + if (other.getRequestedBytes() != 0L) { + setRequestedBytes(other.getRequestedBytes()); + } + if (other.getAllocatedBytes() != 0L) { + setAllocatedBytes(other.getAllocatedBytes()); + } + if (!other.getAllocatorName().isEmpty()) { + allocatorName_ = other.allocatorName_; + onChanged(); + } + if (other.getAllocationId() != 0L) { + setAllocationId(other.getAllocationId()); + } + if (other.getHasSingleReference() != false) { + setHasSingleReference(other.getHasSingleReference()); + } + if (other.getPtr() != 0L) { + setPtr(other.getPtr()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.AllocationDescription parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.AllocationDescription) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long requestedBytes_ ; + /** + *
+     * Total number of bytes requested
+     * 
+ * + * optional int64 requested_bytes = 1; + */ + public long getRequestedBytes() { + return requestedBytes_; + } + /** + *
+     * Total number of bytes requested
+     * 
+ * + * optional int64 requested_bytes = 1; + */ + public Builder setRequestedBytes(long value) { + + requestedBytes_ = value; + onChanged(); + return this; + } + /** + *
+     * Total number of bytes requested
+     * 
+ * + * optional int64 requested_bytes = 1; + */ + public Builder clearRequestedBytes() { + + requestedBytes_ = 0L; + onChanged(); + return this; + } + + private long allocatedBytes_ ; + /** + *
+     * Total number of bytes allocated if known
+     * 
+ * + * optional int64 allocated_bytes = 2; + */ + public long getAllocatedBytes() { + return allocatedBytes_; + } + /** + *
+     * Total number of bytes allocated if known
+     * 
+ * + * optional int64 allocated_bytes = 2; + */ + public Builder setAllocatedBytes(long value) { + + allocatedBytes_ = value; + onChanged(); + return this; + } + /** + *
+     * Total number of bytes allocated if known
+     * 
+ * + * optional int64 allocated_bytes = 2; + */ + public Builder clearAllocatedBytes() { + + allocatedBytes_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object allocatorName_ = ""; + /** + *
+     * Name of the allocator used
+     * 
+ * + * optional string allocator_name = 3; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the allocator used
+     * 
+ * + * optional string allocator_name = 3; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the allocator used
+     * 
+ * + * optional string allocator_name = 3; + */ + public Builder setAllocatorName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + allocatorName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used
+     * 
+ * + * optional string allocator_name = 3; + */ + public Builder clearAllocatorName() { + + allocatorName_ = getDefaultInstance().getAllocatorName(); + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used
+     * 
+ * + * optional string allocator_name = 3; + */ + public Builder setAllocatorNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + allocatorName_ = value; + onChanged(); + return this; + } + + private long allocationId_ ; + /** + *
+     * Identifier of the allocated buffer if known
+     * 
+ * + * optional int64 allocation_id = 4; + */ + public long getAllocationId() { + return allocationId_; + } + /** + *
+     * Identifier of the allocated buffer if known
+     * 
+ * + * optional int64 allocation_id = 4; + */ + public Builder setAllocationId(long value) { + + allocationId_ = value; + onChanged(); + return this; + } + /** + *
+     * Identifier of the allocated buffer if known
+     * 
+ * + * optional int64 allocation_id = 4; + */ + public Builder clearAllocationId() { + + allocationId_ = 0L; + onChanged(); + return this; + } + + private boolean hasSingleReference_ ; + /** + *
+     * Set if this tensor only has one remaining reference
+     * 
+ * + * optional bool has_single_reference = 5; + */ + public boolean getHasSingleReference() { + return hasSingleReference_; + } + /** + *
+     * Set if this tensor only has one remaining reference
+     * 
+ * + * optional bool has_single_reference = 5; + */ + public Builder setHasSingleReference(boolean value) { + + hasSingleReference_ = value; + onChanged(); + return this; + } + /** + *
+     * Set if this tensor only has one remaining reference
+     * 
+ * + * optional bool has_single_reference = 5; + */ + public Builder clearHasSingleReference() { + + hasSingleReference_ = false; + onChanged(); + return this; + } + + private long ptr_ ; + /** + *
+     * Address of the allocation.
+     * 
+ * + * optional uint64 ptr = 6; + */ + public long getPtr() { + return ptr_; + } + /** + *
+     * Address of the allocation.
+     * 
+ * + * optional uint64 ptr = 6; + */ + public Builder setPtr(long value) { + + ptr_ = value; + onChanged(); + return this; + } + /** + *
+     * Address of the allocation.
+     * 
+ * + * optional uint64 ptr = 6; + */ + public Builder clearPtr() { + + ptr_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.AllocationDescription) + } + + // @@protoc_insertion_point(class_scope:tensorflow.AllocationDescription) + private static final org.tensorflow.framework.AllocationDescription DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.AllocationDescription(); + } + + public static org.tensorflow.framework.AllocationDescription getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AllocationDescription parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AllocationDescription(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.AllocationDescription getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionOrBuilder.java new file mode 100644 index 0000000000000..fd4fa2c9c778d --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionOrBuilder.java @@ -0,0 +1,72 @@ 
+// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/allocation_description.proto + +package org.tensorflow.framework; + +public interface AllocationDescriptionOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.AllocationDescription) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Total number of bytes requested
+   * 
+ * + * optional int64 requested_bytes = 1; + */ + long getRequestedBytes(); + + /** + *
+   * Total number of bytes allocated if known
+   * 
+ * + * optional int64 allocated_bytes = 2; + */ + long getAllocatedBytes(); + + /** + *
+   * Name of the allocator used
+   * 
+ * + * optional string allocator_name = 3; + */ + java.lang.String getAllocatorName(); + /** + *
+   * Name of the allocator used
+   * 
+ * + * optional string allocator_name = 3; + */ + com.google.protobuf.ByteString + getAllocatorNameBytes(); + + /** + *
+   * Identifier of the allocated buffer if known
+   * 
+ * + * optional int64 allocation_id = 4; + */ + long getAllocationId(); + + /** + *
+   * Set if this tensor only has one remaining reference
+   * 
+ * + * optional bool has_single_reference = 5; + */ + boolean getHasSingleReference(); + + /** + *
+   * Address of the allocation.
+   * 
+ * + * optional uint64 ptr = 6; + */ + long getPtr(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionProtos.java new file mode 100644 index 0000000000000..5b4b3adc35dca --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocationDescriptionProtos.java @@ -0,0 +1,61 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/allocation_description.proto + +package org.tensorflow.framework; + +public final class AllocationDescriptionProtos { + private AllocationDescriptionProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_AllocationDescription_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_AllocationDescription_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n6tensorflow/core/framework/allocation_d" + + "escription.proto\022\ntensorflow\"\243\001\n\025Allocat" + + "ionDescription\022\027\n\017requested_bytes\030\001 \001(\003\022" + + "\027\n\017allocated_bytes\030\002 \001(\003\022\026\n\016allocator_na" + + "me\030\003 \001(\t\022\025\n\rallocation_id\030\004 \001(\003\022\034\n\024has_s" + + "ingle_reference\030\005 \001(\010\022\013\n\003ptr\030\006 
\001(\004B<\n\030or" + + "g.tensorflow.frameworkB\033AllocationDescri" + + "ptionProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_tensorflow_AllocationDescription_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_AllocationDescription_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_AllocationDescription_descriptor, + new java.lang.String[] { "RequestedBytes", "AllocatedBytes", "AllocatorName", "AllocationId", "HasSingleReference", "Ptr", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsed.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsed.java new file mode 100644 index 0000000000000..8f311f14ceb1b --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsed.java @@ -0,0 +1,612 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.AllocatorMemoryUsed} + */ +public final class AllocatorMemoryUsed extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.AllocatorMemoryUsed) + AllocatorMemoryUsedOrBuilder { + // Use AllocatorMemoryUsed.newBuilder() to construct. + private AllocatorMemoryUsed(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AllocatorMemoryUsed() { + allocatorName_ = ""; + totalBytes_ = 0L; + peakBytes_ = 0L; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private AllocatorMemoryUsed( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + allocatorName_ = s; + break; + } + case 16: { + + totalBytes_ = input.readInt64(); + break; + } + case 24: { + + peakBytes_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_AllocatorMemoryUsed_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_AllocatorMemoryUsed_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AllocatorMemoryUsed.class, org.tensorflow.framework.AllocatorMemoryUsed.Builder.class); + } + + public static final int ALLOCATOR_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object allocatorName_; + /** + * optional string allocator_name = 1; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } + } + /** + * optional string allocator_name = 1; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TOTAL_BYTES_FIELD_NUMBER = 2; + private long totalBytes_; + /** + * optional int64 total_bytes = 2; + */ + public long getTotalBytes() { + return totalBytes_; + } + + public static final int PEAK_BYTES_FIELD_NUMBER = 3; + private long peakBytes_; + /** + * optional int64 peak_bytes = 3; + */ + public long getPeakBytes() { + return peakBytes_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + if (!getAllocatorNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, allocatorName_); + } + if (totalBytes_ != 0L) { + output.writeInt64(2, totalBytes_); + } + if (peakBytes_ != 0L) { + output.writeInt64(3, peakBytes_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getAllocatorNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, allocatorName_); + } + if (totalBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, totalBytes_); + } + if (peakBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, peakBytes_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.AllocatorMemoryUsed)) { + return super.equals(obj); + } + org.tensorflow.framework.AllocatorMemoryUsed other = (org.tensorflow.framework.AllocatorMemoryUsed) obj; + + boolean result = true; + result = result && getAllocatorName() + .equals(other.getAllocatorName()); + result = result && (getTotalBytes() + == other.getTotalBytes()); + result = result && (getPeakBytes() + == other.getPeakBytes()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + ALLOCATOR_NAME_FIELD_NUMBER; + hash = (53 * hash) + getAllocatorName().hashCode(); + hash = (37 * hash) + TOTAL_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTotalBytes()); + hash = (37 * hash) + PEAK_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getPeakBytes()); + 
hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AllocatorMemoryUsed parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.AllocatorMemoryUsed prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.AllocatorMemoryUsed} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.AllocatorMemoryUsed) + org.tensorflow.framework.AllocatorMemoryUsedOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_AllocatorMemoryUsed_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_AllocatorMemoryUsed_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AllocatorMemoryUsed.class, org.tensorflow.framework.AllocatorMemoryUsed.Builder.class); + } + + // Construct using org.tensorflow.framework.AllocatorMemoryUsed.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + allocatorName_ = ""; + + totalBytes_ = 0L; + + peakBytes_ = 0L; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_AllocatorMemoryUsed_descriptor; + } + + public org.tensorflow.framework.AllocatorMemoryUsed getDefaultInstanceForType() { + return 
org.tensorflow.framework.AllocatorMemoryUsed.getDefaultInstance(); + } + + public org.tensorflow.framework.AllocatorMemoryUsed build() { + org.tensorflow.framework.AllocatorMemoryUsed result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.AllocatorMemoryUsed buildPartial() { + org.tensorflow.framework.AllocatorMemoryUsed result = new org.tensorflow.framework.AllocatorMemoryUsed(this); + result.allocatorName_ = allocatorName_; + result.totalBytes_ = totalBytes_; + result.peakBytes_ = peakBytes_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.AllocatorMemoryUsed) { + return mergeFrom((org.tensorflow.framework.AllocatorMemoryUsed)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.AllocatorMemoryUsed other) { + if (other == org.tensorflow.framework.AllocatorMemoryUsed.getDefaultInstance()) return this; + if (!other.getAllocatorName().isEmpty()) { + 
allocatorName_ = other.allocatorName_; + onChanged(); + } + if (other.getTotalBytes() != 0L) { + setTotalBytes(other.getTotalBytes()); + } + if (other.getPeakBytes() != 0L) { + setPeakBytes(other.getPeakBytes()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.AllocatorMemoryUsed parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.AllocatorMemoryUsed) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object allocatorName_ = ""; + /** + * optional string allocator_name = 1; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string allocator_name = 1; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string allocator_name = 1; + */ + public Builder setAllocatorName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + allocatorName_ = value; + onChanged(); + return this; + } + 
/** + * optional string allocator_name = 1; + */ + public Builder clearAllocatorName() { + + allocatorName_ = getDefaultInstance().getAllocatorName(); + onChanged(); + return this; + } + /** + * optional string allocator_name = 1; + */ + public Builder setAllocatorNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + allocatorName_ = value; + onChanged(); + return this; + } + + private long totalBytes_ ; + /** + * optional int64 total_bytes = 2; + */ + public long getTotalBytes() { + return totalBytes_; + } + /** + * optional int64 total_bytes = 2; + */ + public Builder setTotalBytes(long value) { + + totalBytes_ = value; + onChanged(); + return this; + } + /** + * optional int64 total_bytes = 2; + */ + public Builder clearTotalBytes() { + + totalBytes_ = 0L; + onChanged(); + return this; + } + + private long peakBytes_ ; + /** + * optional int64 peak_bytes = 3; + */ + public long getPeakBytes() { + return peakBytes_; + } + /** + * optional int64 peak_bytes = 3; + */ + public Builder setPeakBytes(long value) { + + peakBytes_ = value; + onChanged(); + return this; + } + /** + * optional int64 peak_bytes = 3; + */ + public Builder clearPeakBytes() { + + peakBytes_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.AllocatorMemoryUsed) + } + + // @@protoc_insertion_point(class_scope:tensorflow.AllocatorMemoryUsed) + private static final org.tensorflow.framework.AllocatorMemoryUsed DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.AllocatorMemoryUsed(); + } + + public static org.tensorflow.framework.AllocatorMemoryUsed getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AllocatorMemoryUsed parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AllocatorMemoryUsed(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.AllocatorMemoryUsed getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsedOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsedOrBuilder.java new file mode 100644 index 0000000000000..cdd889297f726 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AllocatorMemoryUsedOrBuilder.java @@ -0,0 +1,29 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +public interface AllocatorMemoryUsedOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.AllocatorMemoryUsed) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string allocator_name = 1; + */ + java.lang.String getAllocatorName(); + /** + * optional string allocator_name = 1; + */ + com.google.protobuf.ByteString + getAllocatorNameBytes(); + + /** + * optional int64 total_bytes = 2; + */ + long getTotalBytes(); + + /** + * optional int64 peak_bytes = 3; + */ + long getPeakBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValue.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValue.java new file mode 100644 index 0000000000000..c4ce93a3fa545 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValue.java @@ -0,0 +1,4667 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/attr_value.proto + +package org.tensorflow.framework; + +/** + *
+ * Protocol buffer representing the value for an attr used to configure an Op.
+ * Comment indicates the corresponding attr type.  Only the field matching the
+ * attr type may be filled.
+ * 
+ * + * Protobuf type {@code tensorflow.AttrValue} + */ +public final class AttrValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.AttrValue) + AttrValueOrBuilder { + // Use AttrValue.newBuilder() to construct. + private AttrValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AttrValue() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private AttrValue( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + org.tensorflow.framework.AttrValue.ListValue.Builder subBuilder = null; + if (valueCase_ == 1) { + subBuilder = ((org.tensorflow.framework.AttrValue.ListValue) value_).toBuilder(); + } + value_ = + input.readMessage(org.tensorflow.framework.AttrValue.ListValue.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((org.tensorflow.framework.AttrValue.ListValue) value_); + value_ = subBuilder.buildPartial(); + } + valueCase_ = 1; + break; + } + case 18: { + valueCase_ = 2; + value_ = input.readBytes(); + break; + } + case 24: { + valueCase_ = 3; + value_ = input.readInt64(); + break; + } + case 37: { + valueCase_ = 4; + value_ = input.readFloat(); + break; + } + case 40: { + valueCase_ = 5; + value_ = input.readBool(); + break; + } + case 48: { + int rawValue = input.readEnum(); + valueCase_ = 6; + value_ = rawValue; + break; + } + case 58: { + org.tensorflow.framework.TensorShapeProto.Builder subBuilder = null; + if 
(valueCase_ == 7) { + subBuilder = ((org.tensorflow.framework.TensorShapeProto) value_).toBuilder(); + } + value_ = + input.readMessage(org.tensorflow.framework.TensorShapeProto.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((org.tensorflow.framework.TensorShapeProto) value_); + value_ = subBuilder.buildPartial(); + } + valueCase_ = 7; + break; + } + case 66: { + org.tensorflow.framework.TensorProto.Builder subBuilder = null; + if (valueCase_ == 8) { + subBuilder = ((org.tensorflow.framework.TensorProto) value_).toBuilder(); + } + value_ = + input.readMessage(org.tensorflow.framework.TensorProto.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((org.tensorflow.framework.TensorProto) value_); + value_ = subBuilder.buildPartial(); + } + valueCase_ = 8; + break; + } + case 74: { + java.lang.String s = input.readStringRequireUtf8(); + valueCase_ = 9; + value_ = s; + break; + } + case 82: { + org.tensorflow.framework.NameAttrList.Builder subBuilder = null; + if (valueCase_ == 10) { + subBuilder = ((org.tensorflow.framework.NameAttrList) value_).toBuilder(); + } + value_ = + input.readMessage(org.tensorflow.framework.NameAttrList.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((org.tensorflow.framework.NameAttrList) value_); + value_ = subBuilder.buildPartial(); + } + valueCase_ = 10; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AttrValue.class, org.tensorflow.framework.AttrValue.Builder.class); + } + + public interface ListValueOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.AttrValue.ListValue) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * "list(string)"
+     * 
+ * + * repeated bytes s = 2; + */ + java.util.List getSList(); + /** + *
+     * "list(string)"
+     * 
+ * + * repeated bytes s = 2; + */ + int getSCount(); + /** + *
+     * "list(string)"
+     * 
+ * + * repeated bytes s = 2; + */ + com.google.protobuf.ByteString getS(int index); + + /** + *
+     * "list(int)"
+     * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + java.util.List getIList(); + /** + *
+     * "list(int)"
+     * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + int getICount(); + /** + *
+     * "list(int)"
+     * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + long getI(int index); + + /** + *
+     * "list(float)"
+     * 
+ * + * repeated float f = 4 [packed = true]; + */ + java.util.List getFList(); + /** + *
+     * "list(float)"
+     * 
+ * + * repeated float f = 4 [packed = true]; + */ + int getFCount(); + /** + *
+     * "list(float)"
+     * 
+ * + * repeated float f = 4 [packed = true]; + */ + float getF(int index); + + /** + *
+     * "list(bool)"
+     * 
+ * + * repeated bool b = 5 [packed = true]; + */ + java.util.List getBList(); + /** + *
+     * "list(bool)"
+     * 
+ * + * repeated bool b = 5 [packed = true]; + */ + int getBCount(); + /** + *
+     * "list(bool)"
+     * 
+ * + * repeated bool b = 5 [packed = true]; + */ + boolean getB(int index); + + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + java.util.List getTypeList(); + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + int getTypeCount(); + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + org.tensorflow.framework.DataType getType(int index); + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + java.util.List + getTypeValueList(); + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + int getTypeValue(int index); + + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + java.util.List + getShapeList(); + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + org.tensorflow.framework.TensorShapeProto getShape(int index); + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + int getShapeCount(); + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + java.util.List + getShapeOrBuilderList(); + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder( + int index); + + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + java.util.List + getTensorList(); + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + org.tensorflow.framework.TensorProto getTensor(int index); + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + int getTensorCount(); + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + java.util.List + getTensorOrBuilderList(); + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + org.tensorflow.framework.TensorProtoOrBuilder getTensorOrBuilder( + int index); + } + /** + *
+   * LINT.IfChange
+   * 
+ * + * Protobuf type {@code tensorflow.AttrValue.ListValue} + */ + public static final class ListValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.AttrValue.ListValue) + ListValueOrBuilder { + // Use ListValue.newBuilder() to construct. + private ListValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ListValue() { + s_ = java.util.Collections.emptyList(); + i_ = java.util.Collections.emptyList(); + f_ = java.util.Collections.emptyList(); + b_ = java.util.Collections.emptyList(); + type_ = java.util.Collections.emptyList(); + shape_ = java.util.Collections.emptyList(); + tensor_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private ListValue( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + s_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + s_.add(input.readBytes()); + break; + } + case 24: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + i_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + i_.add(input.readInt64()); + break; + } + case 26: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { + i_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + 
while (input.getBytesUntilLimit() > 0) { + i_.add(input.readInt64()); + } + input.popLimit(limit); + break; + } + case 37: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + f_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + f_.add(input.readFloat()); + break; + } + case 34: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) { + f_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + while (input.getBytesUntilLimit() > 0) { + f_.add(input.readFloat()); + } + input.popLimit(limit); + break; + } + case 40: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + b_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + b_.add(input.readBool()); + break; + } + case 42: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) { + b_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + while (input.getBytesUntilLimit() > 0) { + b_.add(input.readBool()); + } + input.popLimit(limit); + break; + } + case 48: { + int rawValue = input.readEnum(); + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + type_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + type_.add(rawValue); + break; + } + case 50: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + type_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + type_.add(rawValue); + } + input.popLimit(oldLimit); + break; + } + case 58: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + shape_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; + } + shape_.add( 
+ input.readMessage(org.tensorflow.framework.TensorShapeProto.parser(), extensionRegistry)); + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + tensor_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + tensor_.add( + input.readMessage(org.tensorflow.framework.TensorProto.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + s_ = java.util.Collections.unmodifiableList(s_); + } + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + i_ = java.util.Collections.unmodifiableList(i_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + f_ = java.util.Collections.unmodifiableList(f_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + b_ = java.util.Collections.unmodifiableList(b_); + } + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + type_ = java.util.Collections.unmodifiableList(type_); + } + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + shape_ = java.util.Collections.unmodifiableList(shape_); + } + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + tensor_ = java.util.Collections.unmodifiableList(tensor_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_ListValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_ListValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.tensorflow.framework.AttrValue.ListValue.class, org.tensorflow.framework.AttrValue.ListValue.Builder.class); + } + + public static final int S_FIELD_NUMBER = 2; + private java.util.List s_; + /** + *
+     * "list(string)"
+     * 
+ * + * repeated bytes s = 2; + */ + public java.util.List + getSList() { + return s_; + } + /** + *
+     * "list(string)"
+     * 
+ * + * repeated bytes s = 2; + */ + public int getSCount() { + return s_.size(); + } + /** + *
+     * "list(string)"
+     * 
+ * + * repeated bytes s = 2; + */ + public com.google.protobuf.ByteString getS(int index) { + return s_.get(index); + } + + public static final int I_FIELD_NUMBER = 3; + private java.util.List i_; + /** + *
+     * "list(int)"
+     * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public java.util.List + getIList() { + return i_; + } + /** + *
+     * "list(int)"
+     * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public int getICount() { + return i_.size(); + } + /** + *
+     * "list(int)"
+     * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public long getI(int index) { + return i_.get(index); + } + private int iMemoizedSerializedSize = -1; + + public static final int F_FIELD_NUMBER = 4; + private java.util.List f_; + /** + *
+     * "list(float)"
+     * 
+ * + * repeated float f = 4 [packed = true]; + */ + public java.util.List + getFList() { + return f_; + } + /** + *
+     * "list(float)"
+     * 
+ * + * repeated float f = 4 [packed = true]; + */ + public int getFCount() { + return f_.size(); + } + /** + *
+     * "list(float)"
+     * 
+ * + * repeated float f = 4 [packed = true]; + */ + public float getF(int index) { + return f_.get(index); + } + private int fMemoizedSerializedSize = -1; + + public static final int B_FIELD_NUMBER = 5; + private java.util.List b_; + /** + *
+     * "list(bool)"
+     * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public java.util.List + getBList() { + return b_; + } + /** + *
+     * "list(bool)"
+     * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public int getBCount() { + return b_.size(); + } + /** + *
+     * "list(bool)"
+     * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public boolean getB(int index) { + return b_.get(index); + } + private int bMemoizedSerializedSize = -1; + + public static final int TYPE_FIELD_NUMBER = 6; + private java.util.List type_; + private static final com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.tensorflow.framework.DataType> type_converter_ = + new com.google.protobuf.Internal.ListAdapter.Converter< + java.lang.Integer, org.tensorflow.framework.DataType>() { + public org.tensorflow.framework.DataType convert(java.lang.Integer from) { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(from); + return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + }; + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public java.util.List getTypeList() { + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.tensorflow.framework.DataType>(type_, type_converter_); + } + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public int getTypeCount() { + return type_.size(); + } + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public org.tensorflow.framework.DataType getType(int index) { + return type_converter_.convert(type_.get(index)); + } + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public java.util.List + getTypeValueList() { + return type_; + } + /** + *
+     * "list(type)"
+     * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public int getTypeValue(int index) { + return type_.get(index); + } + private int typeMemoizedSerializedSize; + + public static final int SHAPE_FIELD_NUMBER = 7; + private java.util.List shape_; + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public java.util.List getShapeList() { + return shape_; + } + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public java.util.List + getShapeOrBuilderList() { + return shape_; + } + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public int getShapeCount() { + return shape_.size(); + } + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto getShape(int index) { + return shape_.get(index); + } + /** + *
+     * "list(shape)"
+     * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder( + int index) { + return shape_.get(index); + } + + public static final int TENSOR_FIELD_NUMBER = 8; + private java.util.List tensor_; + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public java.util.List getTensorList() { + return tensor_; + } + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public java.util.List + getTensorOrBuilderList() { + return tensor_; + } + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public int getTensorCount() { + return tensor_.size(); + } + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto getTensor(int index) { + return tensor_.get(index); + } + /** + *
+     * "list(tensor)"
+     * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProtoOrBuilder getTensorOrBuilder( + int index) { + return tensor_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < s_.size(); i++) { + output.writeBytes(2, s_.get(i)); + } + if (getIList().size() > 0) { + output.writeUInt32NoTag(26); + output.writeUInt32NoTag(iMemoizedSerializedSize); + } + for (int i = 0; i < i_.size(); i++) { + output.writeInt64NoTag(i_.get(i)); + } + if (getFList().size() > 0) { + output.writeUInt32NoTag(34); + output.writeUInt32NoTag(fMemoizedSerializedSize); + } + for (int i = 0; i < f_.size(); i++) { + output.writeFloatNoTag(f_.get(i)); + } + if (getBList().size() > 0) { + output.writeUInt32NoTag(42); + output.writeUInt32NoTag(bMemoizedSerializedSize); + } + for (int i = 0; i < b_.size(); i++) { + output.writeBoolNoTag(b_.get(i)); + } + if (getTypeList().size() > 0) { + output.writeUInt32NoTag(50); + output.writeUInt32NoTag(typeMemoizedSerializedSize); + } + for (int i = 0; i < type_.size(); i++) { + output.writeEnumNoTag(type_.get(i)); + } + for (int i = 0; i < shape_.size(); i++) { + output.writeMessage(7, shape_.get(i)); + } + for (int i = 0; i < tensor_.size(); i++) { + output.writeMessage(8, tensor_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < s_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(s_.get(i)); + } + size += dataSize; + size += 1 * getSList().size(); + } + { + int dataSize = 0; + for (int i = 0; 
i < i_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt64SizeNoTag(i_.get(i)); + } + size += dataSize; + if (!getIList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + iMemoizedSerializedSize = dataSize; + } + { + int dataSize = 0; + dataSize = 4 * getFList().size(); + size += dataSize; + if (!getFList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + fMemoizedSerializedSize = dataSize; + } + { + int dataSize = 0; + dataSize = 1 * getBList().size(); + size += dataSize; + if (!getBList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + bMemoizedSerializedSize = dataSize; + } + { + int dataSize = 0; + for (int i = 0; i < type_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(type_.get(i)); + } + size += dataSize; + if (!getTypeList().isEmpty()) { size += 1; + size += com.google.protobuf.CodedOutputStream + .computeUInt32SizeNoTag(dataSize); + }typeMemoizedSerializedSize = dataSize; + } + for (int i = 0; i < shape_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, shape_.get(i)); + } + for (int i = 0; i < tensor_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, tensor_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.AttrValue.ListValue)) { + return super.equals(obj); + } + org.tensorflow.framework.AttrValue.ListValue other = (org.tensorflow.framework.AttrValue.ListValue) obj; + + boolean result = true; + result = result && getSList() + .equals(other.getSList()); + result = result && getIList() + 
.equals(other.getIList()); + result = result && getFList() + .equals(other.getFList()); + result = result && getBList() + .equals(other.getBList()); + result = result && type_.equals(other.type_); + result = result && getShapeList() + .equals(other.getShapeList()); + result = result && getTensorList() + .equals(other.getTensorList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getSCount() > 0) { + hash = (37 * hash) + S_FIELD_NUMBER; + hash = (53 * hash) + getSList().hashCode(); + } + if (getICount() > 0) { + hash = (37 * hash) + I_FIELD_NUMBER; + hash = (53 * hash) + getIList().hashCode(); + } + if (getFCount() > 0) { + hash = (37 * hash) + F_FIELD_NUMBER; + hash = (53 * hash) + getFList().hashCode(); + } + if (getBCount() > 0) { + hash = (37 * hash) + B_FIELD_NUMBER; + hash = (53 * hash) + getBList().hashCode(); + } + if (getTypeCount() > 0) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_.hashCode(); + } + if (getShapeCount() > 0) { + hash = (37 * hash) + SHAPE_FIELD_NUMBER; + hash = (53 * hash) + getShapeList().hashCode(); + } + if (getTensorCount() > 0) { + hash = (37 * hash) + TENSOR_FIELD_NUMBER; + hash = (53 * hash) + getTensorList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.AttrValue.ListValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AttrValue.ListValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.tensorflow.framework.AttrValue.ListValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AttrValue.ListValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue.ListValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AttrValue.ListValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue.ListValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AttrValue.ListValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue.ListValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AttrValue.ListValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.AttrValue.ListValue prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * LINT.IfChange
+     * 
+ * + * Protobuf type {@code tensorflow.AttrValue.ListValue} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.AttrValue.ListValue) + org.tensorflow.framework.AttrValue.ListValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_ListValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_ListValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AttrValue.ListValue.class, org.tensorflow.framework.AttrValue.ListValue.Builder.class); + } + + // Construct using org.tensorflow.framework.AttrValue.ListValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getShapeFieldBuilder(); + getTensorFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + s_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + i_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + f_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + b_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + type_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + if (shapeBuilder_ == null) { + shape_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + } else { + shapeBuilder_.clear(); + } + if 
(tensorBuilder_ == null) { + tensor_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + } else { + tensorBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_ListValue_descriptor; + } + + public org.tensorflow.framework.AttrValue.ListValue getDefaultInstanceForType() { + return org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } + + public org.tensorflow.framework.AttrValue.ListValue build() { + org.tensorflow.framework.AttrValue.ListValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.AttrValue.ListValue buildPartial() { + org.tensorflow.framework.AttrValue.ListValue result = new org.tensorflow.framework.AttrValue.ListValue(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + s_ = java.util.Collections.unmodifiableList(s_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.s_ = s_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + i_ = java.util.Collections.unmodifiableList(i_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.i_ = i_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + f_ = java.util.Collections.unmodifiableList(f_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.f_ = f_; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + b_ = java.util.Collections.unmodifiableList(b_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.b_ = b_; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + type_ = java.util.Collections.unmodifiableList(type_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.type_ = type_; + if (shapeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { + shape_ = java.util.Collections.unmodifiableList(shape_); + 
bitField0_ = (bitField0_ & ~0x00000020); + } + result.shape_ = shape_; + } else { + result.shape_ = shapeBuilder_.build(); + } + if (tensorBuilder_ == null) { + if (((bitField0_ & 0x00000040) == 0x00000040)) { + tensor_ = java.util.Collections.unmodifiableList(tensor_); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.tensor_ = tensor_; + } else { + result.tensor_ = tensorBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.AttrValue.ListValue) { + return mergeFrom((org.tensorflow.framework.AttrValue.ListValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.AttrValue.ListValue other) { + if (other == org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance()) return this; + if (!other.s_.isEmpty()) { + if (s_.isEmpty()) { + s_ = other.s_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureSIsMutable(); + s_.addAll(other.s_); + } + onChanged(); + } + if (!other.i_.isEmpty()) { + if (i_.isEmpty()) { + i_ = other.i_; + 
bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureIIsMutable(); + i_.addAll(other.i_); + } + onChanged(); + } + if (!other.f_.isEmpty()) { + if (f_.isEmpty()) { + f_ = other.f_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureFIsMutable(); + f_.addAll(other.f_); + } + onChanged(); + } + if (!other.b_.isEmpty()) { + if (b_.isEmpty()) { + b_ = other.b_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureBIsMutable(); + b_.addAll(other.b_); + } + onChanged(); + } + if (!other.type_.isEmpty()) { + if (type_.isEmpty()) { + type_ = other.type_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureTypeIsMutable(); + type_.addAll(other.type_); + } + onChanged(); + } + if (shapeBuilder_ == null) { + if (!other.shape_.isEmpty()) { + if (shape_.isEmpty()) { + shape_ = other.shape_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureShapeIsMutable(); + shape_.addAll(other.shape_); + } + onChanged(); + } + } else { + if (!other.shape_.isEmpty()) { + if (shapeBuilder_.isEmpty()) { + shapeBuilder_.dispose(); + shapeBuilder_ = null; + shape_ = other.shape_; + bitField0_ = (bitField0_ & ~0x00000020); + shapeBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getShapeFieldBuilder() : null; + } else { + shapeBuilder_.addAllMessages(other.shape_); + } + } + } + if (tensorBuilder_ == null) { + if (!other.tensor_.isEmpty()) { + if (tensor_.isEmpty()) { + tensor_ = other.tensor_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureTensorIsMutable(); + tensor_.addAll(other.tensor_); + } + onChanged(); + } + } else { + if (!other.tensor_.isEmpty()) { + if (tensorBuilder_.isEmpty()) { + tensorBuilder_.dispose(); + tensorBuilder_ = null; + tensor_ = other.tensor_; + bitField0_ = (bitField0_ & ~0x00000040); + tensorBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getTensorFieldBuilder() : null; + } else { + tensorBuilder_.addAllMessages(other.tensor_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.AttrValue.ListValue parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.AttrValue.ListValue) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List s_ = java.util.Collections.emptyList(); + private void ensureSIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + s_ = new java.util.ArrayList(s_); + bitField0_ |= 0x00000001; + } + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public java.util.List + getSList() { + return java.util.Collections.unmodifiableList(s_); + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public int getSCount() { + return s_.size(); + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public com.google.protobuf.ByteString getS(int index) { + return s_.get(index); + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public Builder setS( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSIsMutable(); + s_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public Builder addS(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSIsMutable(); + s_.add(value); + onChanged(); + return this; + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public Builder addAllS( + java.lang.Iterable values) { + ensureSIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, s_); + onChanged(); + return this; + } + /** + *
+       * "list(string)"
+       * 
+ * + * repeated bytes s = 2; + */ + public Builder clearS() { + s_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + private java.util.List i_ = java.util.Collections.emptyList(); + private void ensureIIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + i_ = new java.util.ArrayList(i_); + bitField0_ |= 0x00000002; + } + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public java.util.List + getIList() { + return java.util.Collections.unmodifiableList(i_); + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public int getICount() { + return i_.size(); + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public long getI(int index) { + return i_.get(index); + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public Builder setI( + int index, long value) { + ensureIIsMutable(); + i_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public Builder addI(long value) { + ensureIIsMutable(); + i_.add(value); + onChanged(); + return this; + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public Builder addAllI( + java.lang.Iterable values) { + ensureIIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, i_); + onChanged(); + return this; + } + /** + *
+       * "list(int)"
+       * 
+ * + * repeated int64 i = 3 [packed = true]; + */ + public Builder clearI() { + i_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + private java.util.List f_ = java.util.Collections.emptyList(); + private void ensureFIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + f_ = new java.util.ArrayList(f_); + bitField0_ |= 0x00000004; + } + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public java.util.List + getFList() { + return java.util.Collections.unmodifiableList(f_); + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public int getFCount() { + return f_.size(); + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public float getF(int index) { + return f_.get(index); + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public Builder setF( + int index, float value) { + ensureFIsMutable(); + f_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public Builder addF(float value) { + ensureFIsMutable(); + f_.add(value); + onChanged(); + return this; + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public Builder addAllF( + java.lang.Iterable values) { + ensureFIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, f_); + onChanged(); + return this; + } + /** + *
+       * "list(float)"
+       * 
+ * + * repeated float f = 4 [packed = true]; + */ + public Builder clearF() { + f_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + + private java.util.List b_ = java.util.Collections.emptyList(); + private void ensureBIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + b_ = new java.util.ArrayList(b_); + bitField0_ |= 0x00000008; + } + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public java.util.List + getBList() { + return java.util.Collections.unmodifiableList(b_); + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public int getBCount() { + return b_.size(); + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public boolean getB(int index) { + return b_.get(index); + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public Builder setB( + int index, boolean value) { + ensureBIsMutable(); + b_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public Builder addB(boolean value) { + ensureBIsMutable(); + b_.add(value); + onChanged(); + return this; + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public Builder addAllB( + java.lang.Iterable values) { + ensureBIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, b_); + onChanged(); + return this; + } + /** + *
+       * "list(bool)"
+       * 
+ * + * repeated bool b = 5 [packed = true]; + */ + public Builder clearB() { + b_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + + private java.util.List type_ = + java.util.Collections.emptyList(); + private void ensureTypeIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + type_ = new java.util.ArrayList(type_); + bitField0_ |= 0x00000010; + } + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public java.util.List getTypeList() { + return new com.google.protobuf.Internal.ListAdapter< + java.lang.Integer, org.tensorflow.framework.DataType>(type_, type_converter_); + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public int getTypeCount() { + return type_.size(); + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public org.tensorflow.framework.DataType getType(int index) { + return type_converter_.convert(type_.get(index)); + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder setType( + int index, org.tensorflow.framework.DataType value) { + if (value == null) { + throw new NullPointerException(); + } + ensureTypeIsMutable(); + type_.set(index, value.getNumber()); + onChanged(); + return this; + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder addType(org.tensorflow.framework.DataType value) { + if (value == null) { + throw new NullPointerException(); + } + ensureTypeIsMutable(); + type_.add(value.getNumber()); + onChanged(); + return this; + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder addAllType( + java.lang.Iterable values) { + ensureTypeIsMutable(); + for (org.tensorflow.framework.DataType value : values) { + type_.add(value.getNumber()); + } + onChanged(); + return this; + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder clearType() { + type_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public java.util.List + getTypeValueList() { + return java.util.Collections.unmodifiableList(type_); + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public int getTypeValue(int index) { + return type_.get(index); + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder setTypeValue( + int index, int value) { + ensureTypeIsMutable(); + type_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder addTypeValue(int value) { + ensureTypeIsMutable(); + type_.add(value); + onChanged(); + return this; + } + /** + *
+       * "list(type)"
+       * 
+ * + * repeated .tensorflow.DataType type = 6 [packed = true]; + */ + public Builder addAllTypeValue( + java.lang.Iterable values) { + ensureTypeIsMutable(); + for (int value : values) { + type_.add(value); + } + onChanged(); + return this; + } + + private java.util.List shape_ = + java.util.Collections.emptyList(); + private void ensureShapeIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + shape_ = new java.util.ArrayList(shape_); + bitField0_ |= 0x00000020; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> shapeBuilder_; + + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public java.util.List getShapeList() { + if (shapeBuilder_ == null) { + return java.util.Collections.unmodifiableList(shape_); + } else { + return shapeBuilder_.getMessageList(); + } + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public int getShapeCount() { + if (shapeBuilder_ == null) { + return shape_.size(); + } else { + return shapeBuilder_.getCount(); + } + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto getShape(int index) { + if (shapeBuilder_ == null) { + return shape_.get(index); + } else { + return shapeBuilder_.getMessage(index); + } + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder setShape( + int index, org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureShapeIsMutable(); + shape_.set(index, value); + onChanged(); + } else { + shapeBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder setShape( + int index, org.tensorflow.framework.TensorShapeProto.Builder builderForValue) { + if (shapeBuilder_ == null) { + ensureShapeIsMutable(); + shape_.set(index, builderForValue.build()); + onChanged(); + } else { + shapeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder addShape(org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureShapeIsMutable(); + shape_.add(value); + onChanged(); + } else { + shapeBuilder_.addMessage(value); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder addShape( + int index, org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureShapeIsMutable(); + shape_.add(index, value); + onChanged(); + } else { + shapeBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder addShape( + org.tensorflow.framework.TensorShapeProto.Builder builderForValue) { + if (shapeBuilder_ == null) { + ensureShapeIsMutable(); + shape_.add(builderForValue.build()); + onChanged(); + } else { + shapeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder addShape( + int index, org.tensorflow.framework.TensorShapeProto.Builder builderForValue) { + if (shapeBuilder_ == null) { + ensureShapeIsMutable(); + shape_.add(index, builderForValue.build()); + onChanged(); + } else { + shapeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder addAllShape( + java.lang.Iterable values) { + if (shapeBuilder_ == null) { + ensureShapeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, shape_); + onChanged(); + } else { + shapeBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder clearShape() { + if (shapeBuilder_ == null) { + shape_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + shapeBuilder_.clear(); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public Builder removeShape(int index) { + if (shapeBuilder_ == null) { + ensureShapeIsMutable(); + shape_.remove(index); + onChanged(); + } else { + shapeBuilder_.remove(index); + } + return this; + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto.Builder getShapeBuilder( + int index) { + return getShapeFieldBuilder().getBuilder(index); + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder( + int index) { + if (shapeBuilder_ == null) { + return shape_.get(index); } else { + return shapeBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public java.util.List + getShapeOrBuilderList() { + if (shapeBuilder_ != null) { + return shapeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(shape_); + } + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto.Builder addShapeBuilder() { + return getShapeFieldBuilder().addBuilder( + org.tensorflow.framework.TensorShapeProto.getDefaultInstance()); + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto.Builder addShapeBuilder( + int index) { + return getShapeFieldBuilder().addBuilder( + index, org.tensorflow.framework.TensorShapeProto.getDefaultInstance()); + } + /** + *
+       * "list(shape)"
+       * 
+ * + * repeated .tensorflow.TensorShapeProto shape = 7; + */ + public java.util.List + getShapeBuilderList() { + return getShapeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> + getShapeFieldBuilder() { + if (shapeBuilder_ == null) { + shapeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder>( + shape_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + shape_ = null; + } + return shapeBuilder_; + } + + private java.util.List tensor_ = + java.util.Collections.emptyList(); + private void ensureTensorIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + tensor_ = new java.util.ArrayList(tensor_); + bitField0_ |= 0x00000040; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.TensorProto, org.tensorflow.framework.TensorProto.Builder, org.tensorflow.framework.TensorProtoOrBuilder> tensorBuilder_; + + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public java.util.List getTensorList() { + if (tensorBuilder_ == null) { + return java.util.Collections.unmodifiableList(tensor_); + } else { + return tensorBuilder_.getMessageList(); + } + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public int getTensorCount() { + if (tensorBuilder_ == null) { + return tensor_.size(); + } else { + return tensorBuilder_.getCount(); + } + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto getTensor(int index) { + if (tensorBuilder_ == null) { + return tensor_.get(index); + } else { + return tensorBuilder_.getMessage(index); + } + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder setTensor( + int index, org.tensorflow.framework.TensorProto value) { + if (tensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTensorIsMutable(); + tensor_.set(index, value); + onChanged(); + } else { + tensorBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder setTensor( + int index, org.tensorflow.framework.TensorProto.Builder builderForValue) { + if (tensorBuilder_ == null) { + ensureTensorIsMutable(); + tensor_.set(index, builderForValue.build()); + onChanged(); + } else { + tensorBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder addTensor(org.tensorflow.framework.TensorProto value) { + if (tensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTensorIsMutable(); + tensor_.add(value); + onChanged(); + } else { + tensorBuilder_.addMessage(value); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder addTensor( + int index, org.tensorflow.framework.TensorProto value) { + if (tensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTensorIsMutable(); + tensor_.add(index, value); + onChanged(); + } else { + tensorBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder addTensor( + org.tensorflow.framework.TensorProto.Builder builderForValue) { + if (tensorBuilder_ == null) { + ensureTensorIsMutable(); + tensor_.add(builderForValue.build()); + onChanged(); + } else { + tensorBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder addTensor( + int index, org.tensorflow.framework.TensorProto.Builder builderForValue) { + if (tensorBuilder_ == null) { + ensureTensorIsMutable(); + tensor_.add(index, builderForValue.build()); + onChanged(); + } else { + tensorBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder addAllTensor( + java.lang.Iterable values) { + if (tensorBuilder_ == null) { + ensureTensorIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tensor_); + onChanged(); + } else { + tensorBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder clearTensor() { + if (tensorBuilder_ == null) { + tensor_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + } else { + tensorBuilder_.clear(); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public Builder removeTensor(int index) { + if (tensorBuilder_ == null) { + ensureTensorIsMutable(); + tensor_.remove(index); + onChanged(); + } else { + tensorBuilder_.remove(index); + } + return this; + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto.Builder getTensorBuilder( + int index) { + return getTensorFieldBuilder().getBuilder(index); + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProtoOrBuilder getTensorOrBuilder( + int index) { + if (tensorBuilder_ == null) { + return tensor_.get(index); } else { + return tensorBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public java.util.List + getTensorOrBuilderList() { + if (tensorBuilder_ != null) { + return tensorBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(tensor_); + } + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto.Builder addTensorBuilder() { + return getTensorFieldBuilder().addBuilder( + org.tensorflow.framework.TensorProto.getDefaultInstance()); + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto.Builder addTensorBuilder( + int index) { + return getTensorFieldBuilder().addBuilder( + index, org.tensorflow.framework.TensorProto.getDefaultInstance()); + } + /** + *
+       * "list(tensor)"
+       * 
+ * + * repeated .tensorflow.TensorProto tensor = 8; + */ + public java.util.List + getTensorBuilderList() { + return getTensorFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.TensorProto, org.tensorflow.framework.TensorProto.Builder, org.tensorflow.framework.TensorProtoOrBuilder> + getTensorFieldBuilder() { + if (tensorBuilder_ == null) { + tensorBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.TensorProto, org.tensorflow.framework.TensorProto.Builder, org.tensorflow.framework.TensorProtoOrBuilder>( + tensor_, + ((bitField0_ & 0x00000040) == 0x00000040), + getParentForChildren(), + isClean()); + tensor_ = null; + } + return tensorBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.AttrValue.ListValue) + } + + // @@protoc_insertion_point(class_scope:tensorflow.AttrValue.ListValue) + private static final org.tensorflow.framework.AttrValue.ListValue DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.AttrValue.ListValue(); + } + + public static org.tensorflow.framework.AttrValue.ListValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ListValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListValue(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return 
PARSER; + } + + public org.tensorflow.framework.AttrValue.ListValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int valueCase_ = 0; + private java.lang.Object value_; + public enum ValueCase + implements com.google.protobuf.Internal.EnumLite { + S(2), + I(3), + F(4), + B(5), + TYPE(6), + SHAPE(7), + TENSOR(8), + LIST(1), + FUNC(10), + PLACEHOLDER(9), + VALUE_NOT_SET(0); + private final int value; + private ValueCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static ValueCase valueOf(int value) { + return forNumber(value); + } + + public static ValueCase forNumber(int value) { + switch (value) { + case 2: return S; + case 3: return I; + case 4: return F; + case 5: return B; + case 6: return TYPE; + case 7: return SHAPE; + case 8: return TENSOR; + case 1: return LIST; + case 10: return FUNC; + case 9: return PLACEHOLDER; + case 0: return VALUE_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public ValueCase + getValueCase() { + return ValueCase.forNumber( + valueCase_); + } + + public static final int S_FIELD_NUMBER = 2; + /** + *
+   * "string"
+   * 
+ * + * optional bytes s = 2; + */ + public com.google.protobuf.ByteString getS() { + if (valueCase_ == 2) { + return (com.google.protobuf.ByteString) value_; + } + return com.google.protobuf.ByteString.EMPTY; + } + + public static final int I_FIELD_NUMBER = 3; + /** + *
+   * "int"
+   * 
+ * + * optional int64 i = 3; + */ + public long getI() { + if (valueCase_ == 3) { + return (java.lang.Long) value_; + } + return 0L; + } + + public static final int F_FIELD_NUMBER = 4; + /** + *
+   * "float"
+   * 
+ * + * optional float f = 4; + */ + public float getF() { + if (valueCase_ == 4) { + return (java.lang.Float) value_; + } + return 0F; + } + + public static final int B_FIELD_NUMBER = 5; + /** + *
+   * "bool"
+   * 
+ * + * optional bool b = 5; + */ + public boolean getB() { + if (valueCase_ == 5) { + return (java.lang.Boolean) value_; + } + return false; + } + + public static final int TYPE_FIELD_NUMBER = 6; + /** + *
+   * "type"
+   * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public int getTypeValue() { + if (valueCase_ == 6) { + return (java.lang.Integer) value_; + } + return 0; + } + /** + *
+   * "type"
+   * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public org.tensorflow.framework.DataType getType() { + if (valueCase_ == 6) { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf( + (java.lang.Integer) value_); + return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + return org.tensorflow.framework.DataType.DT_INVALID; + } + + public static final int SHAPE_FIELD_NUMBER = 7; + /** + *
+   * "shape"
+   * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto getShape() { + if (valueCase_ == 7) { + return (org.tensorflow.framework.TensorShapeProto) value_; + } + return org.tensorflow.framework.TensorShapeProto.getDefaultInstance(); + } + /** + *
+   * "shape"
+   * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() { + if (valueCase_ == 7) { + return (org.tensorflow.framework.TensorShapeProto) value_; + } + return org.tensorflow.framework.TensorShapeProto.getDefaultInstance(); + } + + public static final int TENSOR_FIELD_NUMBER = 8; + /** + *
+   * "tensor"
+   * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto getTensor() { + if (valueCase_ == 8) { + return (org.tensorflow.framework.TensorProto) value_; + } + return org.tensorflow.framework.TensorProto.getDefaultInstance(); + } + /** + *
+   * "tensor"
+   * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProtoOrBuilder getTensorOrBuilder() { + if (valueCase_ == 8) { + return (org.tensorflow.framework.TensorProto) value_; + } + return org.tensorflow.framework.TensorProto.getDefaultInstance(); + } + + public static final int LIST_FIELD_NUMBER = 1; + /** + *
+   * any "list(...)"
+   * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public org.tensorflow.framework.AttrValue.ListValue getList() { + if (valueCase_ == 1) { + return (org.tensorflow.framework.AttrValue.ListValue) value_; + } + return org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } + /** + *
+   * any "list(...)"
+   * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public org.tensorflow.framework.AttrValue.ListValueOrBuilder getListOrBuilder() { + if (valueCase_ == 1) { + return (org.tensorflow.framework.AttrValue.ListValue) value_; + } + return org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } + + public static final int FUNC_FIELD_NUMBER = 10; + /** + *
+   * "func" represents a function. func.name is a function's name or
+   * a primitive op's name. func.attr.first is the name of an attr
+   * defined for that function. func.attr.second is the value for
+   * that attr in the instantiation.
+   * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public org.tensorflow.framework.NameAttrList getFunc() { + if (valueCase_ == 10) { + return (org.tensorflow.framework.NameAttrList) value_; + } + return org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } + /** + *
+   * "func" represents a function. func.name is a function's name or
+   * a primitive op's name. func.attr.first is the name of an attr
+   * defined for that function. func.attr.second is the value for
+   * that attr in the instantiation.
+   * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public org.tensorflow.framework.NameAttrListOrBuilder getFuncOrBuilder() { + if (valueCase_ == 10) { + return (org.tensorflow.framework.NameAttrList) value_; + } + return org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } + + public static final int PLACEHOLDER_FIELD_NUMBER = 9; + /** + *
+   * This is a placeholder only used in nodes defined inside a
+   * function.  It indicates the attr value will be supplied when
+   * the function is instantiated.  For example, let us suppose a
+   * node "N" in function "FN". "N" has an attr "A" with value
+   * placeholder = "foo". When FN is instantiated with attr "foo"
+   * set to "bar", the instantiated node N's attr A will have been
+   * given the value "bar".
+   * 
+ * + * optional string placeholder = 9; + */ + public java.lang.String getPlaceholder() { + java.lang.Object ref = ""; + if (valueCase_ == 9) { + ref = value_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (valueCase_ == 9) { + value_ = s; + } + return s; + } + } + /** + *
+   * This is a placeholder only used in nodes defined inside a
+   * function.  It indicates the attr value will be supplied when
+   * the function is instantiated.  For example, let us suppose a
+   * node "N" in function "FN". "N" has an attr "A" with value
+   * placeholder = "foo". When FN is instantiated with attr "foo"
+   * set to "bar", the instantiated node N's attr A will have been
+   * given the value "bar".
+   * 
+ * + * optional string placeholder = 9; + */ + public com.google.protobuf.ByteString + getPlaceholderBytes() { + java.lang.Object ref = ""; + if (valueCase_ == 9) { + ref = value_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (valueCase_ == 9) { + value_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (valueCase_ == 1) { + output.writeMessage(1, (org.tensorflow.framework.AttrValue.ListValue) value_); + } + if (valueCase_ == 2) { + output.writeBytes( + 2, (com.google.protobuf.ByteString)((com.google.protobuf.ByteString) value_)); + } + if (valueCase_ == 3) { + output.writeInt64( + 3, (long)((java.lang.Long) value_)); + } + if (valueCase_ == 4) { + output.writeFloat( + 4, (float)((java.lang.Float) value_)); + } + if (valueCase_ == 5) { + output.writeBool( + 5, (boolean)((java.lang.Boolean) value_)); + } + if (valueCase_ == 6) { + output.writeEnum(6, ((java.lang.Integer) value_)); + } + if (valueCase_ == 7) { + output.writeMessage(7, (org.tensorflow.framework.TensorShapeProto) value_); + } + if (valueCase_ == 8) { + output.writeMessage(8, (org.tensorflow.framework.TensorProto) value_); + } + if (valueCase_ == 9) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 9, value_); + } + if (valueCase_ == 10) { + output.writeMessage(10, (org.tensorflow.framework.NameAttrList) value_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (valueCase_ == 1) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(1, (org.tensorflow.framework.AttrValue.ListValue) value_); + } + if (valueCase_ == 2) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize( + 2, (com.google.protobuf.ByteString)((com.google.protobuf.ByteString) value_)); + } + if (valueCase_ == 3) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size( + 3, (long)((java.lang.Long) value_)); + } + if (valueCase_ == 4) { + size += com.google.protobuf.CodedOutputStream + .computeFloatSize( + 4, (float)((java.lang.Float) value_)); + } + if (valueCase_ == 5) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize( + 5, (boolean)((java.lang.Boolean) value_)); + } + if (valueCase_ == 6) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(6, ((java.lang.Integer) value_)); + } + if (valueCase_ == 7) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, (org.tensorflow.framework.TensorShapeProto) value_); + } + if (valueCase_ == 8) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, (org.tensorflow.framework.TensorProto) value_); + } + if (valueCase_ == 9) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, value_); + } + if (valueCase_ == 10) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, (org.tensorflow.framework.NameAttrList) value_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.AttrValue)) { + return super.equals(obj); + } + org.tensorflow.framework.AttrValue other = (org.tensorflow.framework.AttrValue) obj; + + boolean result = true; + result = result && getValueCase().equals( + other.getValueCase()); + if (!result) return false; + switch (valueCase_) { + case 2: + result = result && getS() + 
.equals(other.getS()); + break; + case 3: + result = result && (getI() + == other.getI()); + break; + case 4: + result = result && ( + java.lang.Float.floatToIntBits(getF()) + == java.lang.Float.floatToIntBits( + other.getF())); + break; + case 5: + result = result && (getB() + == other.getB()); + break; + case 6: + result = result && getTypeValue() + == other.getTypeValue(); + break; + case 7: + result = result && getShape() + .equals(other.getShape()); + break; + case 8: + result = result && getTensor() + .equals(other.getTensor()); + break; + case 1: + result = result && getList() + .equals(other.getList()); + break; + case 10: + result = result && getFunc() + .equals(other.getFunc()); + break; + case 9: + result = result && getPlaceholder() + .equals(other.getPlaceholder()); + break; + case 0: + default: + } + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + switch (valueCase_) { + case 2: + hash = (37 * hash) + S_FIELD_NUMBER; + hash = (53 * hash) + getS().hashCode(); + break; + case 3: + hash = (37 * hash) + I_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getI()); + break; + case 4: + hash = (37 * hash) + F_FIELD_NUMBER; + hash = (53 * hash) + java.lang.Float.floatToIntBits( + getF()); + break; + case 5: + hash = (37 * hash) + B_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getB()); + break; + case 6: + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getTypeValue(); + break; + case 7: + hash = (37 * hash) + SHAPE_FIELD_NUMBER; + hash = (53 * hash) + getShape().hashCode(); + break; + case 8: + hash = (37 * hash) + TENSOR_FIELD_NUMBER; + hash = (53 * hash) + getTensor().hashCode(); + break; + case 1: + hash = (37 * hash) + LIST_FIELD_NUMBER; + hash = (53 * hash) + getList().hashCode(); + break; + case 10: + hash = (37 * 
hash) + FUNC_FIELD_NUMBER; + hash = (53 * hash) + getFunc().hashCode(); + break; + case 9: + hash = (37 * hash) + PLACEHOLDER_FIELD_NUMBER; + hash = (53 * hash) + getPlaceholder().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.AttrValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AttrValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.AttrValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AttrValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static 
org.tensorflow.framework.AttrValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.AttrValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.AttrValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.AttrValue prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Protocol buffer representing the value for an attr used to configure an Op.
+   * Comment indicates the corresponding attr type.  Only the field matching the
+   * attr type may be filled.
+   * 
+ * + * Protobuf type {@code tensorflow.AttrValue} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.AttrValue) + org.tensorflow.framework.AttrValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.AttrValue.class, org.tensorflow.framework.AttrValue.Builder.class); + } + + // Construct using org.tensorflow.framework.AttrValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + valueCase_ = 0; + value_ = null; + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_AttrValue_descriptor; + } + + public org.tensorflow.framework.AttrValue getDefaultInstanceForType() { + return org.tensorflow.framework.AttrValue.getDefaultInstance(); + } + + public org.tensorflow.framework.AttrValue build() { + org.tensorflow.framework.AttrValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.AttrValue buildPartial() { + org.tensorflow.framework.AttrValue result = new 
org.tensorflow.framework.AttrValue(this); + if (valueCase_ == 2) { + result.value_ = value_; + } + if (valueCase_ == 3) { + result.value_ = value_; + } + if (valueCase_ == 4) { + result.value_ = value_; + } + if (valueCase_ == 5) { + result.value_ = value_; + } + if (valueCase_ == 6) { + result.value_ = value_; + } + if (valueCase_ == 7) { + if (shapeBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = shapeBuilder_.build(); + } + } + if (valueCase_ == 8) { + if (tensorBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = tensorBuilder_.build(); + } + } + if (valueCase_ == 1) { + if (listBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = listBuilder_.build(); + } + } + if (valueCase_ == 10) { + if (funcBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = funcBuilder_.build(); + } + } + if (valueCase_ == 9) { + result.value_ = value_; + } + result.valueCase_ = valueCase_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.AttrValue) { + return 
mergeFrom((org.tensorflow.framework.AttrValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.AttrValue other) { + if (other == org.tensorflow.framework.AttrValue.getDefaultInstance()) return this; + switch (other.getValueCase()) { + case S: { + setS(other.getS()); + break; + } + case I: { + setI(other.getI()); + break; + } + case F: { + setF(other.getF()); + break; + } + case B: { + setB(other.getB()); + break; + } + case TYPE: { + setTypeValue(other.getTypeValue()); + break; + } + case SHAPE: { + mergeShape(other.getShape()); + break; + } + case TENSOR: { + mergeTensor(other.getTensor()); + break; + } + case LIST: { + mergeList(other.getList()); + break; + } + case FUNC: { + mergeFunc(other.getFunc()); + break; + } + case PLACEHOLDER: { + valueCase_ = 9; + value_ = other.value_; + onChanged(); + break; + } + case VALUE_NOT_SET: { + break; + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.AttrValue parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.AttrValue) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int valueCase_ = 0; + private java.lang.Object value_; + public ValueCase + getValueCase() { + return ValueCase.forNumber( + valueCase_); + } + + public Builder clearValue() { + valueCase_ = 0; + value_ = null; + onChanged(); + return this; + } + + + /** + *
+     * "string"
+     * 
+ * + * optional bytes s = 2; + */ + public com.google.protobuf.ByteString getS() { + if (valueCase_ == 2) { + return (com.google.protobuf.ByteString) value_; + } + return com.google.protobuf.ByteString.EMPTY; + } + /** + *
+     * "string"
+     * 
+ * + * optional bytes s = 2; + */ + public Builder setS(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + valueCase_ = 2; + value_ = value; + onChanged(); + return this; + } + /** + *
+     * "string"
+     * 
+ * + * optional bytes s = 2; + */ + public Builder clearS() { + if (valueCase_ == 2) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + *
+     * "int"
+     * 
+ * + * optional int64 i = 3; + */ + public long getI() { + if (valueCase_ == 3) { + return (java.lang.Long) value_; + } + return 0L; + } + /** + *
+     * "int"
+     * 
+ * + * optional int64 i = 3; + */ + public Builder setI(long value) { + valueCase_ = 3; + value_ = value; + onChanged(); + return this; + } + /** + *
+     * "int"
+     * 
+ * + * optional int64 i = 3; + */ + public Builder clearI() { + if (valueCase_ == 3) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + *
+     * "float"
+     * 
+ * + * optional float f = 4; + */ + public float getF() { + if (valueCase_ == 4) { + return (java.lang.Float) value_; + } + return 0F; + } + /** + *
+     * "float"
+     * 
+ * + * optional float f = 4; + */ + public Builder setF(float value) { + valueCase_ = 4; + value_ = value; + onChanged(); + return this; + } + /** + *
+     * "float"
+     * 
+ * + * optional float f = 4; + */ + public Builder clearF() { + if (valueCase_ == 4) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + *
+     * "bool"
+     * 
+ * + * optional bool b = 5; + */ + public boolean getB() { + if (valueCase_ == 5) { + return (java.lang.Boolean) value_; + } + return false; + } + /** + *
+     * "bool"
+     * 
+ * + * optional bool b = 5; + */ + public Builder setB(boolean value) { + valueCase_ = 5; + value_ = value; + onChanged(); + return this; + } + /** + *
+     * "bool"
+     * 
+ * + * optional bool b = 5; + */ + public Builder clearB() { + if (valueCase_ == 5) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + *
+     * "type"
+     * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public int getTypeValue() { + if (valueCase_ == 6) { + return ((java.lang.Integer) value_).intValue(); + } + return 0; + } + /** + *
+     * "type"
+     * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public Builder setTypeValue(int value) { + valueCase_ = 6; + value_ = value; + onChanged(); + return this; + } + /** + *
+     * "type"
+     * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public org.tensorflow.framework.DataType getType() { + if (valueCase_ == 6) { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf( + (java.lang.Integer) value_); + return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + return org.tensorflow.framework.DataType.DT_INVALID; + } + /** + *
+     * "type"
+     * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public Builder setType(org.tensorflow.framework.DataType value) { + if (value == null) { + throw new NullPointerException(); + } + valueCase_ = 6; + value_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+     * "type"
+     * 
+ * + * optional .tensorflow.DataType type = 6; + */ + public Builder clearType() { + if (valueCase_ == 6) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> shapeBuilder_; + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto getShape() { + if (shapeBuilder_ == null) { + if (valueCase_ == 7) { + return (org.tensorflow.framework.TensorShapeProto) value_; + } + return org.tensorflow.framework.TensorShapeProto.getDefaultInstance(); + } else { + if (valueCase_ == 7) { + return shapeBuilder_.getMessage(); + } + return org.tensorflow.framework.TensorShapeProto.getDefaultInstance(); + } + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public Builder setShape(org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + shapeBuilder_.setMessage(value); + } + valueCase_ = 7; + return this; + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public Builder setShape( + org.tensorflow.framework.TensorShapeProto.Builder builderForValue) { + if (shapeBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + shapeBuilder_.setMessage(builderForValue.build()); + } + valueCase_ = 7; + return this; + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public Builder mergeShape(org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (valueCase_ == 7 && + value_ != org.tensorflow.framework.TensorShapeProto.getDefaultInstance()) { + value_ = org.tensorflow.framework.TensorShapeProto.newBuilder((org.tensorflow.framework.TensorShapeProto) value_) + .mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + if (valueCase_ == 7) { + shapeBuilder_.mergeFrom(value); + } + shapeBuilder_.setMessage(value); + } + valueCase_ = 7; + return this; + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public Builder clearShape() { + if (shapeBuilder_ == null) { + if (valueCase_ == 7) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + } else { + if (valueCase_ == 7) { + valueCase_ = 0; + value_ = null; + } + shapeBuilder_.clear(); + } + return this; + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProto.Builder getShapeBuilder() { + return getShapeFieldBuilder().getBuilder(); + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() { + if ((valueCase_ == 7) && (shapeBuilder_ != null)) { + return shapeBuilder_.getMessageOrBuilder(); + } else { + if (valueCase_ == 7) { + return (org.tensorflow.framework.TensorShapeProto) value_; + } + return org.tensorflow.framework.TensorShapeProto.getDefaultInstance(); + } + } + /** + *
+     * "shape"
+     * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> + getShapeFieldBuilder() { + if (shapeBuilder_ == null) { + if (!(valueCase_ == 7)) { + value_ = org.tensorflow.framework.TensorShapeProto.getDefaultInstance(); + } + shapeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder>( + (org.tensorflow.framework.TensorShapeProto) value_, + getParentForChildren(), + isClean()); + value_ = null; + } + valueCase_ = 7; + onChanged();; + return shapeBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorProto, org.tensorflow.framework.TensorProto.Builder, org.tensorflow.framework.TensorProtoOrBuilder> tensorBuilder_; + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto getTensor() { + if (tensorBuilder_ == null) { + if (valueCase_ == 8) { + return (org.tensorflow.framework.TensorProto) value_; + } + return org.tensorflow.framework.TensorProto.getDefaultInstance(); + } else { + if (valueCase_ == 8) { + return tensorBuilder_.getMessage(); + } + return org.tensorflow.framework.TensorProto.getDefaultInstance(); + } + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public Builder setTensor(org.tensorflow.framework.TensorProto value) { + if (tensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + tensorBuilder_.setMessage(value); + } + valueCase_ = 8; + return this; + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public Builder setTensor( + org.tensorflow.framework.TensorProto.Builder builderForValue) { + if (tensorBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + tensorBuilder_.setMessage(builderForValue.build()); + } + valueCase_ = 8; + return this; + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public Builder mergeTensor(org.tensorflow.framework.TensorProto value) { + if (tensorBuilder_ == null) { + if (valueCase_ == 8 && + value_ != org.tensorflow.framework.TensorProto.getDefaultInstance()) { + value_ = org.tensorflow.framework.TensorProto.newBuilder((org.tensorflow.framework.TensorProto) value_) + .mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + if (valueCase_ == 8) { + tensorBuilder_.mergeFrom(value); + } + tensorBuilder_.setMessage(value); + } + valueCase_ = 8; + return this; + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public Builder clearTensor() { + if (tensorBuilder_ == null) { + if (valueCase_ == 8) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + } else { + if (valueCase_ == 8) { + valueCase_ = 0; + value_ = null; + } + tensorBuilder_.clear(); + } + return this; + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProto.Builder getTensorBuilder() { + return getTensorFieldBuilder().getBuilder(); + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + public org.tensorflow.framework.TensorProtoOrBuilder getTensorOrBuilder() { + if ((valueCase_ == 8) && (tensorBuilder_ != null)) { + return tensorBuilder_.getMessageOrBuilder(); + } else { + if (valueCase_ == 8) { + return (org.tensorflow.framework.TensorProto) value_; + } + return org.tensorflow.framework.TensorProto.getDefaultInstance(); + } + } + /** + *
+     * "tensor"
+     * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorProto, org.tensorflow.framework.TensorProto.Builder, org.tensorflow.framework.TensorProtoOrBuilder> + getTensorFieldBuilder() { + if (tensorBuilder_ == null) { + if (!(valueCase_ == 8)) { + value_ = org.tensorflow.framework.TensorProto.getDefaultInstance(); + } + tensorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorProto, org.tensorflow.framework.TensorProto.Builder, org.tensorflow.framework.TensorProtoOrBuilder>( + (org.tensorflow.framework.TensorProto) value_, + getParentForChildren(), + isClean()); + value_ = null; + } + valueCase_ = 8; + onChanged();; + return tensorBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue.ListValue, org.tensorflow.framework.AttrValue.ListValue.Builder, org.tensorflow.framework.AttrValue.ListValueOrBuilder> listBuilder_; + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public org.tensorflow.framework.AttrValue.ListValue getList() { + if (listBuilder_ == null) { + if (valueCase_ == 1) { + return (org.tensorflow.framework.AttrValue.ListValue) value_; + } + return org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } else { + if (valueCase_ == 1) { + return listBuilder_.getMessage(); + } + return org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public Builder setList(org.tensorflow.framework.AttrValue.ListValue value) { + if (listBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + listBuilder_.setMessage(value); + } + valueCase_ = 1; + return this; + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public Builder setList( + org.tensorflow.framework.AttrValue.ListValue.Builder builderForValue) { + if (listBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + listBuilder_.setMessage(builderForValue.build()); + } + valueCase_ = 1; + return this; + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public Builder mergeList(org.tensorflow.framework.AttrValue.ListValue value) { + if (listBuilder_ == null) { + if (valueCase_ == 1 && + value_ != org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance()) { + value_ = org.tensorflow.framework.AttrValue.ListValue.newBuilder((org.tensorflow.framework.AttrValue.ListValue) value_) + .mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + if (valueCase_ == 1) { + listBuilder_.mergeFrom(value); + } + listBuilder_.setMessage(value); + } + valueCase_ = 1; + return this; + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public Builder clearList() { + if (listBuilder_ == null) { + if (valueCase_ == 1) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + } else { + if (valueCase_ == 1) { + valueCase_ = 0; + value_ = null; + } + listBuilder_.clear(); + } + return this; + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public org.tensorflow.framework.AttrValue.ListValue.Builder getListBuilder() { + return getListFieldBuilder().getBuilder(); + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + public org.tensorflow.framework.AttrValue.ListValueOrBuilder getListOrBuilder() { + if ((valueCase_ == 1) && (listBuilder_ != null)) { + return listBuilder_.getMessageOrBuilder(); + } else { + if (valueCase_ == 1) { + return (org.tensorflow.framework.AttrValue.ListValue) value_; + } + return org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } + } + /** + *
+     * any "list(...)"
+     * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue.ListValue, org.tensorflow.framework.AttrValue.ListValue.Builder, org.tensorflow.framework.AttrValue.ListValueOrBuilder> + getListFieldBuilder() { + if (listBuilder_ == null) { + if (!(valueCase_ == 1)) { + value_ = org.tensorflow.framework.AttrValue.ListValue.getDefaultInstance(); + } + listBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue.ListValue, org.tensorflow.framework.AttrValue.ListValue.Builder, org.tensorflow.framework.AttrValue.ListValueOrBuilder>( + (org.tensorflow.framework.AttrValue.ListValue) value_, + getParentForChildren(), + isClean()); + value_ = null; + } + valueCase_ = 1; + onChanged();; + return listBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.NameAttrList, org.tensorflow.framework.NameAttrList.Builder, org.tensorflow.framework.NameAttrListOrBuilder> funcBuilder_; + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public org.tensorflow.framework.NameAttrList getFunc() { + if (funcBuilder_ == null) { + if (valueCase_ == 10) { + return (org.tensorflow.framework.NameAttrList) value_; + } + return org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } else { + if (valueCase_ == 10) { + return funcBuilder_.getMessage(); + } + return org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public Builder setFunc(org.tensorflow.framework.NameAttrList value) { + if (funcBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + funcBuilder_.setMessage(value); + } + valueCase_ = 10; + return this; + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public Builder setFunc( + org.tensorflow.framework.NameAttrList.Builder builderForValue) { + if (funcBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + funcBuilder_.setMessage(builderForValue.build()); + } + valueCase_ = 10; + return this; + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public Builder mergeFunc(org.tensorflow.framework.NameAttrList value) { + if (funcBuilder_ == null) { + if (valueCase_ == 10 && + value_ != org.tensorflow.framework.NameAttrList.getDefaultInstance()) { + value_ = org.tensorflow.framework.NameAttrList.newBuilder((org.tensorflow.framework.NameAttrList) value_) + .mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + if (valueCase_ == 10) { + funcBuilder_.mergeFrom(value); + } + funcBuilder_.setMessage(value); + } + valueCase_ = 10; + return this; + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public Builder clearFunc() { + if (funcBuilder_ == null) { + if (valueCase_ == 10) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + } else { + if (valueCase_ == 10) { + valueCase_ = 0; + value_ = null; + } + funcBuilder_.clear(); + } + return this; + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public org.tensorflow.framework.NameAttrList.Builder getFuncBuilder() { + return getFuncFieldBuilder().getBuilder(); + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + public org.tensorflow.framework.NameAttrListOrBuilder getFuncOrBuilder() { + if ((valueCase_ == 10) && (funcBuilder_ != null)) { + return funcBuilder_.getMessageOrBuilder(); + } else { + if (valueCase_ == 10) { + return (org.tensorflow.framework.NameAttrList) value_; + } + return org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } + } + /** + *
+     * "func" represents a function. func.name is a function's name or
+     * a primitive op's name. func.attr.first is the name of an attr
+     * defined for that function. func.attr.second is the value for
+     * that attr in the instantiation.
+     * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.NameAttrList, org.tensorflow.framework.NameAttrList.Builder, org.tensorflow.framework.NameAttrListOrBuilder> + getFuncFieldBuilder() { + if (funcBuilder_ == null) { + if (!(valueCase_ == 10)) { + value_ = org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } + funcBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.NameAttrList, org.tensorflow.framework.NameAttrList.Builder, org.tensorflow.framework.NameAttrListOrBuilder>( + (org.tensorflow.framework.NameAttrList) value_, + getParentForChildren(), + isClean()); + value_ = null; + } + valueCase_ = 10; + onChanged();; + return funcBuilder_; + } + + /** + *
+     * This is a placeholder only used in nodes defined inside a
+     * function.  It indicates the attr value will be supplied when
+     * the function is instantiated.  For example, let us suppose a
+     * node "N" in function "FN". "N" has an attr "A" with value
+     * placeholder = "foo". When FN is instantiated with attr "foo"
+     * set to "bar", the instantiated node N's attr A will have been
+     * given the value "bar".
+     * 
+ * + * optional string placeholder = 9; + */ + public java.lang.String getPlaceholder() { + java.lang.Object ref = ""; + if (valueCase_ == 9) { + ref = value_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (valueCase_ == 9) { + value_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * This is a placeholder only used in nodes defined inside a
+     * function.  It indicates the attr value will be supplied when
+     * the function is instantiated.  For example, let us suppose a
+     * node "N" in function "FN". "N" has an attr "A" with value
+     * placeholder = "foo". When FN is instantiated with attr "foo"
+     * set to "bar", the instantiated node N's attr A will have been
+     * given the value "bar".
+     * 
+ * + * optional string placeholder = 9; + */ + public com.google.protobuf.ByteString + getPlaceholderBytes() { + java.lang.Object ref = ""; + if (valueCase_ == 9) { + ref = value_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (valueCase_ == 9) { + value_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * This is a placeholder only used in nodes defined inside a
+     * function.  It indicates the attr value will be supplied when
+     * the function is instantiated.  For example, let us suppose a
+     * node "N" in function "FN". "N" has an attr "A" with value
+     * placeholder = "foo". When FN is instantiated with attr "foo"
+     * set to "bar", the instantiated node N's attr A will have been
+     * given the value "bar".
+     * 
+ * + * optional string placeholder = 9; + */ + public Builder setPlaceholder( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + valueCase_ = 9; + value_ = value; + onChanged(); + return this; + } + /** + *
+     * This is a placeholder only used in nodes defined inside a
+     * function.  It indicates the attr value will be supplied when
+     * the function is instantiated.  For example, let us suppose a
+     * node "N" in function "FN". "N" has an attr "A" with value
+     * placeholder = "foo". When FN is instantiated with attr "foo"
+     * set to "bar", the instantiated node N's attr A will have been
+     * given the value "bar".
+     * 
+ * + * optional string placeholder = 9; + */ + public Builder clearPlaceholder() { + if (valueCase_ == 9) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + /** + *
+     * This is a placeholder only used in nodes defined inside a
+     * function.  It indicates the attr value will be supplied when
+     * the function is instantiated.  For example, let us suppose a
+     * node "N" in function "FN". "N" has an attr "A" with value
+     * placeholder = "foo". When FN is instantiated with attr "foo"
+     * set to "bar", the instantiated node N's attr A will have been
+     * given the value "bar".
+     * 
+ * + * optional string placeholder = 9; + */ + public Builder setPlaceholderBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + valueCase_ = 9; + value_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.AttrValue) + } + + // @@protoc_insertion_point(class_scope:tensorflow.AttrValue) + private static final org.tensorflow.framework.AttrValue DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.AttrValue(); + } + + public static org.tensorflow.framework.AttrValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AttrValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AttrValue(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.AttrValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueOrBuilder.java new file mode 100644 index 0000000000000..e792f4bb7039c --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueOrBuilder.java @@ -0,0 +1,168 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/attr_value.proto + +package org.tensorflow.framework; + +public interface AttrValueOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.AttrValue) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * "string"
+   * 
+ * + * optional bytes s = 2; + */ + com.google.protobuf.ByteString getS(); + + /** + *
+   * "int"
+   * 
+ * + * optional int64 i = 3; + */ + long getI(); + + /** + *
+   * "float"
+   * 
+ * + * optional float f = 4; + */ + float getF(); + + /** + *
+   * "bool"
+   * 
+ * + * optional bool b = 5; + */ + boolean getB(); + + /** + *
+   * "type"
+   * 
+ * + * optional .tensorflow.DataType type = 6; + */ + int getTypeValue(); + /** + *
+   * "type"
+   * 
+ * + * optional .tensorflow.DataType type = 6; + */ + org.tensorflow.framework.DataType getType(); + + /** + *
+   * "shape"
+   * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + org.tensorflow.framework.TensorShapeProto getShape(); + /** + *
+   * "shape"
+   * 
+ * + * optional .tensorflow.TensorShapeProto shape = 7; + */ + org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder(); + + /** + *
+   * "tensor"
+   * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + org.tensorflow.framework.TensorProto getTensor(); + /** + *
+   * "tensor"
+   * 
+ * + * optional .tensorflow.TensorProto tensor = 8; + */ + org.tensorflow.framework.TensorProtoOrBuilder getTensorOrBuilder(); + + /** + *
+   * any "list(...)"
+   * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + org.tensorflow.framework.AttrValue.ListValue getList(); + /** + *
+   * any "list(...)"
+   * 
+ * + * optional .tensorflow.AttrValue.ListValue list = 1; + */ + org.tensorflow.framework.AttrValue.ListValueOrBuilder getListOrBuilder(); + + /** + *
+   * "func" represents a function. func.name is a function's name or
+   * a primitive op's name. func.attr.first is the name of an attr
+   * defined for that function. func.attr.second is the value for
+   * that attr in the instantiation.
+   * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + org.tensorflow.framework.NameAttrList getFunc(); + /** + *
+   * "func" represents a function. func.name is a function's name or
+   * a primitive op's name. func.attr.first is the name of an attr
+   * defined for that function. func.attr.second is the value for
+   * that attr in the instantiation.
+   * 
+ * + * optional .tensorflow.NameAttrList func = 10; + */ + org.tensorflow.framework.NameAttrListOrBuilder getFuncOrBuilder(); + + /** + *
+   * This is a placeholder only used in nodes defined inside a
+   * function.  It indicates the attr value will be supplied when
+   * the function is instantiated.  For example, let us suppose a
+   * node "N" in function "FN". "N" has an attr "A" with value
+   * placeholder = "foo". When FN is instantiated with attr "foo"
+   * set to "bar", the instantiated node N's attr A will have been
+   * given the value "bar".
+   * 
+ * + * optional string placeholder = 9; + */ + java.lang.String getPlaceholder(); + /** + *
+   * This is a placeholder only used in nodes defined inside a
+   * function.  It indicates the attr value will be supplied when
+   * the function is instantiated.  For example, let us suppose a
+   * node "N" in function "FN". "N" has an attr "A" with value
+   * placeholder = "foo". When FN is instantiated with attr "foo"
+   * set to "bar", the instantiated node N's attr A will have been
+   * given the value "bar".
+   * 
+ * + * optional string placeholder = 9; + */ + com.google.protobuf.ByteString + getPlaceholderBytes(); + + public org.tensorflow.framework.AttrValue.ValueCase getValueCase(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueProtos.java new file mode 100644 index 0000000000000..acf16f8d8b05d --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/AttrValueProtos.java @@ -0,0 +1,115 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/attr_value.proto + +package org.tensorflow.framework; + +public final class AttrValueProtos { + private AttrValueProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_AttrValue_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_AttrValue_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_AttrValue_ListValue_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_AttrValue_ListValue_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_NameAttrList_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_NameAttrList_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_NameAttrList_AttrEntry_descriptor; + 
static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_NameAttrList_AttrEntry_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n*tensorflow/core/framework/attr_value.p" + + "roto\022\ntensorflow\032&tensorflow/core/framew" + + "ork/tensor.proto\032,tensorflow/core/framew" + + "ork/tensor_shape.proto\032%tensorflow/core/" + + "framework/types.proto\"\376\003\n\tAttrValue\022\013\n\001s" + + "\030\002 \001(\014H\000\022\013\n\001i\030\003 \001(\003H\000\022\013\n\001f\030\004 \001(\002H\000\022\013\n\001b\030" + + "\005 \001(\010H\000\022$\n\004type\030\006 \001(\0162\024.tensorflow.DataT" + + "ypeH\000\022-\n\005shape\030\007 \001(\0132\034.tensorflow.Tensor" + + "ShapeProtoH\000\022)\n\006tensor\030\010 \001(\0132\027.tensorflo" + + "w.TensorProtoH\000\022/\n\004list\030\001 \001(\0132\037.tensorfl", + "ow.AttrValue.ListValueH\000\022(\n\004func\030\n \001(\0132\030" + + ".tensorflow.NameAttrListH\000\022\025\n\013placeholde" + + "r\030\t \001(\tH\000\032\301\001\n\tListValue\022\t\n\001s\030\002 \003(\014\022\r\n\001i\030" + + "\003 \003(\003B\002\020\001\022\r\n\001f\030\004 \003(\002B\002\020\001\022\r\n\001b\030\005 \003(\010B\002\020\001\022" + + "&\n\004type\030\006 \003(\0162\024.tensorflow.DataTypeB\002\020\001\022" + + "+\n\005shape\030\007 \003(\0132\034.tensorflow.TensorShapeP" + + "roto\022\'\n\006tensor\030\010 \003(\0132\027.tensorflow.Tensor" + + "ProtoB\007\n\005value\"\222\001\n\014NameAttrList\022\014\n\004name\030" + + "\001 \001(\t\0220\n\004attr\030\002 \003(\0132\".tensorflow.NameAtt" + + "rList.AttrEntry\032B\n\tAttrEntry\022\013\n\003key\030\001 \001(", + "\t\022$\n\005value\030\002 \001(\0132\025.tensorflow.AttrValue:" + + "\0028\001B0\n\030org.tensorflow.frameworkB\017AttrVal" + + 
"ueProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.TensorProtos.getDescriptor(), + org.tensorflow.framework.TensorShapeProtos.getDescriptor(), + org.tensorflow.framework.TypesProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_AttrValue_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_AttrValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_AttrValue_descriptor, + new java.lang.String[] { "S", "I", "F", "B", "Type", "Shape", "Tensor", "List", "Func", "Placeholder", "Value", }); + internal_static_tensorflow_AttrValue_ListValue_descriptor = + internal_static_tensorflow_AttrValue_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_AttrValue_ListValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_AttrValue_ListValue_descriptor, + new java.lang.String[] { "S", "I", "F", "B", "Type", "Shape", "Tensor", }); + internal_static_tensorflow_NameAttrList_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_NameAttrList_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_NameAttrList_descriptor, + new java.lang.String[] { "Name", "Attr", }); + internal_static_tensorflow_NameAttrList_AttrEntry_descriptor = + 
internal_static_tensorflow_NameAttrList_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_NameAttrList_AttrEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_NameAttrList_AttrEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + org.tensorflow.framework.TensorProtos.getDescriptor(); + org.tensorflow.framework.TensorShapeProtos.getDescriptor(); + org.tensorflow.framework.TypesProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProto.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProto.java new file mode 100644 index 0000000000000..2d9ab94589c30 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProto.java @@ -0,0 +1,2597 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + *
+ * Session configuration parameters.
+ * The system picks appropriate values for fields that are not set.
+ * 
+ * + * Protobuf type {@code tensorflow.ConfigProto} + */ +public final class ConfigProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.ConfigProto) + ConfigProtoOrBuilder { + // Use ConfigProto.newBuilder() to construct. + private ConfigProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ConfigProto() { + intraOpParallelismThreads_ = 0; + interOpParallelismThreads_ = 0; + usePerSessionThreads_ = false; + sessionInterOpThreadPool_ = java.util.Collections.emptyList(); + placementPeriod_ = 0; + deviceFilters_ = com.google.protobuf.LazyStringArrayList.EMPTY; + allowSoftPlacement_ = false; + logDevicePlacement_ = false; + operationTimeoutInMs_ = 0L; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private ConfigProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + deviceCount_ = com.google.protobuf.MapField.newMapField( + DeviceCountDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000001; + } + com.google.protobuf.MapEntry + deviceCount__ = input.readMessage( + DeviceCountDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + deviceCount_.getMutableMap().put( + deviceCount__.getKey(), deviceCount__.getValue()); + break; + } + case 16: { + + intraOpParallelismThreads_ = input.readInt32(); + break; + } + case 24: { + + placementPeriod_ = input.readInt32(); + 
break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + deviceFilters_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000040; + } + deviceFilters_.add(s); + break; + } + case 40: { + + interOpParallelismThreads_ = input.readInt32(); + break; + } + case 50: { + org.tensorflow.framework.GPUOptions.Builder subBuilder = null; + if (gpuOptions_ != null) { + subBuilder = gpuOptions_.toBuilder(); + } + gpuOptions_ = input.readMessage(org.tensorflow.framework.GPUOptions.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(gpuOptions_); + gpuOptions_ = subBuilder.buildPartial(); + } + + break; + } + case 56: { + + allowSoftPlacement_ = input.readBool(); + break; + } + case 64: { + + logDevicePlacement_ = input.readBool(); + break; + } + case 72: { + + usePerSessionThreads_ = input.readBool(); + break; + } + case 82: { + org.tensorflow.framework.GraphOptions.Builder subBuilder = null; + if (graphOptions_ != null) { + subBuilder = graphOptions_.toBuilder(); + } + graphOptions_ = input.readMessage(org.tensorflow.framework.GraphOptions.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(graphOptions_); + graphOptions_ = subBuilder.buildPartial(); + } + + break; + } + case 88: { + + operationTimeoutInMs_ = input.readInt64(); + break; + } + case 98: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + sessionInterOpThreadPool_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + sessionInterOpThreadPool_.add( + input.readMessage(org.tensorflow.framework.ThreadPoolOptionProto.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if 
(((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + deviceFilters_ = deviceFilters_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + sessionInterOpThreadPool_ = java.util.Collections.unmodifiableList(sessionInterOpThreadPool_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_ConfigProto_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetDeviceCount(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_ConfigProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.ConfigProto.class, org.tensorflow.framework.ConfigProto.Builder.class); + } + + private int bitField0_; + public static final int DEVICE_COUNT_FIELD_NUMBER = 1; + private static final class DeviceCountDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.Integer> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_ConfigProto_DeviceCountEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.INT32, + 0); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.Integer> deviceCount_; + private com.google.protobuf.MapField + internalGetDeviceCount() { + if (deviceCount_ == null) { + return com.google.protobuf.MapField.emptyMapField( + DeviceCountDefaultEntryHolder.defaultEntry); + } + return deviceCount_; + } + + public int 
getDeviceCountCount() { + return internalGetDeviceCount().getMap().size(); + } + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + + public boolean containsDeviceCount( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetDeviceCount().getMap().containsKey(key); + } + /** + * Use {@link #getDeviceCountMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getDeviceCount() { + return getDeviceCountMap(); + } + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + + public java.util.Map getDeviceCountMap() { + return internalGetDeviceCount().getMap(); + } + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + + public int getDeviceCountOrDefault( + java.lang.String key, + int defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetDeviceCount().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + + public int getDeviceCountOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetDeviceCount().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int INTRA_OP_PARALLELISM_THREADS_FIELD_NUMBER = 2; + private int intraOpParallelismThreads_; + /** + *
+   * The execution of an individual op (for some op types) can be
+   * parallelized on a pool of intra_op_parallelism_threads.
+   * 0 means the system picks an appropriate number.
+   * 
+ * + * optional int32 intra_op_parallelism_threads = 2; + */ + public int getIntraOpParallelismThreads() { + return intraOpParallelismThreads_; + } + + public static final int INTER_OP_PARALLELISM_THREADS_FIELD_NUMBER = 5; + private int interOpParallelismThreads_; + /** + *
+   * Nodes that perform blocking operations are enqueued on a pool of
+   * inter_op_parallelism_threads available in each process.
+   * 0 means the system picks an appropriate number.
+   * Note that the first Session created in the process sets the
+   * number of threads for all future sessions unless use_per_session_threads is
+   * true or session_inter_op_thread_pool is configured.
+   * 
+ * + * optional int32 inter_op_parallelism_threads = 5; + */ + public int getInterOpParallelismThreads() { + return interOpParallelismThreads_; + } + + public static final int USE_PER_SESSION_THREADS_FIELD_NUMBER = 9; + private boolean usePerSessionThreads_; + /** + *
+   * If true, use a new set of threads for this session rather than the global
+   * pool of threads. Only supported by direct sessions.
+   * If false, use the global threads created by the first session, or the
+   * per-session thread pools configured by session_inter_op_thread_pool.
+   * This option is deprecated. The same effect can be achieved by setting
+   * session_inter_op_thread_pool to have one element, whose num_threads equals
+   * inter_op_parallelism_threads.
+   * 
+ * + * optional bool use_per_session_threads = 9; + */ + public boolean getUsePerSessionThreads() { + return usePerSessionThreads_; + } + + public static final int SESSION_INTER_OP_THREAD_POOL_FIELD_NUMBER = 12; + private java.util.List sessionInterOpThreadPool_; + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public java.util.List getSessionInterOpThreadPoolList() { + return sessionInterOpThreadPool_; + } + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public java.util.List + getSessionInterOpThreadPoolOrBuilderList() { + return sessionInterOpThreadPool_; + } + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public int getSessionInterOpThreadPoolCount() { + return sessionInterOpThreadPool_.size(); + } + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProto getSessionInterOpThreadPool(int index) { + return sessionInterOpThreadPool_.get(index); + } + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProtoOrBuilder getSessionInterOpThreadPoolOrBuilder( + int index) { + return sessionInterOpThreadPool_.get(index); + } + + public static final int PLACEMENT_PERIOD_FIELD_NUMBER = 3; + private int placementPeriod_; + /** + *
+   * Assignment of Nodes to Devices is recomputed every placement_period
+   * steps until the system warms up (at which point the recomputation
+   * typically slows down automatically).
+   * 
+ * + * optional int32 placement_period = 3; + */ + public int getPlacementPeriod() { + return placementPeriod_; + } + + public static final int DEVICE_FILTERS_FIELD_NUMBER = 4; + private com.google.protobuf.LazyStringList deviceFilters_; + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + public com.google.protobuf.ProtocolStringList + getDeviceFiltersList() { + return deviceFilters_; + } + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + public int getDeviceFiltersCount() { + return deviceFilters_.size(); + } + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + public java.lang.String getDeviceFilters(int index) { + return deviceFilters_.get(index); + } + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + public com.google.protobuf.ByteString + getDeviceFiltersBytes(int index) { + return deviceFilters_.getByteString(index); + } + + public static final int GPU_OPTIONS_FIELD_NUMBER = 6; + private org.tensorflow.framework.GPUOptions gpuOptions_; + /** + *
+   * Options that apply to all GPUs.
+   * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public boolean hasGpuOptions() { + return gpuOptions_ != null; + } + /** + *
+   * Options that apply to all GPUs.
+   * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public org.tensorflow.framework.GPUOptions getGpuOptions() { + return gpuOptions_ == null ? org.tensorflow.framework.GPUOptions.getDefaultInstance() : gpuOptions_; + } + /** + *
+   * Options that apply to all GPUs.
+   * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public org.tensorflow.framework.GPUOptionsOrBuilder getGpuOptionsOrBuilder() { + return getGpuOptions(); + } + + public static final int ALLOW_SOFT_PLACEMENT_FIELD_NUMBER = 7; + private boolean allowSoftPlacement_; + /** + *
+   * Whether soft placement is allowed. If allow_soft_placement is true,
+   * an op will be placed on CPU if
+   *   1. there's no GPU implementation for the OP
+   * or
+   *   2. no GPU devices are known or registered
+   * or
+   *   3. need to co-locate with reftype input(s) which are from CPU.
+   * 
+ * + * optional bool allow_soft_placement = 7; + */ + public boolean getAllowSoftPlacement() { + return allowSoftPlacement_; + } + + public static final int LOG_DEVICE_PLACEMENT_FIELD_NUMBER = 8; + private boolean logDevicePlacement_; + /** + *
+   * Whether device placements should be logged.
+   * 
+ * + * optional bool log_device_placement = 8; + */ + public boolean getLogDevicePlacement() { + return logDevicePlacement_; + } + + public static final int GRAPH_OPTIONS_FIELD_NUMBER = 10; + private org.tensorflow.framework.GraphOptions graphOptions_; + /** + *
+   * Options that apply to all graphs.
+   * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public boolean hasGraphOptions() { + return graphOptions_ != null; + } + /** + *
+   * Options that apply to all graphs.
+   * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public org.tensorflow.framework.GraphOptions getGraphOptions() { + return graphOptions_ == null ? org.tensorflow.framework.GraphOptions.getDefaultInstance() : graphOptions_; + } + /** + *
+   * Options that apply to all graphs.
+   * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public org.tensorflow.framework.GraphOptionsOrBuilder getGraphOptionsOrBuilder() { + return getGraphOptions(); + } + + public static final int OPERATION_TIMEOUT_IN_MS_FIELD_NUMBER = 11; + private long operationTimeoutInMs_; + /** + *
+   * Global timeout for all blocking operations in this session.  If non-zero,
+   * and not overridden on a per-operation basis, this value will be used as the
+   * deadline for all blocking operations.
+   * 
+ * + * optional int64 operation_timeout_in_ms = 11; + */ + public long getOperationTimeoutInMs() { + return operationTimeoutInMs_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetDeviceCount(), + DeviceCountDefaultEntryHolder.defaultEntry, + 1); + if (intraOpParallelismThreads_ != 0) { + output.writeInt32(2, intraOpParallelismThreads_); + } + if (placementPeriod_ != 0) { + output.writeInt32(3, placementPeriod_); + } + for (int i = 0; i < deviceFilters_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, deviceFilters_.getRaw(i)); + } + if (interOpParallelismThreads_ != 0) { + output.writeInt32(5, interOpParallelismThreads_); + } + if (gpuOptions_ != null) { + output.writeMessage(6, getGpuOptions()); + } + if (allowSoftPlacement_ != false) { + output.writeBool(7, allowSoftPlacement_); + } + if (logDevicePlacement_ != false) { + output.writeBool(8, logDevicePlacement_); + } + if (usePerSessionThreads_ != false) { + output.writeBool(9, usePerSessionThreads_); + } + if (graphOptions_ != null) { + output.writeMessage(10, getGraphOptions()); + } + if (operationTimeoutInMs_ != 0L) { + output.writeInt64(11, operationTimeoutInMs_); + } + for (int i = 0; i < sessionInterOpThreadPool_.size(); i++) { + output.writeMessage(12, sessionInterOpThreadPool_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (java.util.Map.Entry entry + : internalGetDeviceCount().getMap().entrySet()) { + com.google.protobuf.MapEntry + deviceCount__ = 
DeviceCountDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, deviceCount__); + } + if (intraOpParallelismThreads_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, intraOpParallelismThreads_); + } + if (placementPeriod_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, placementPeriod_); + } + { + int dataSize = 0; + for (int i = 0; i < deviceFilters_.size(); i++) { + dataSize += computeStringSizeNoTag(deviceFilters_.getRaw(i)); + } + size += dataSize; + size += 1 * getDeviceFiltersList().size(); + } + if (interOpParallelismThreads_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(5, interOpParallelismThreads_); + } + if (gpuOptions_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, getGpuOptions()); + } + if (allowSoftPlacement_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, allowSoftPlacement_); + } + if (logDevicePlacement_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, logDevicePlacement_); + } + if (usePerSessionThreads_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(9, usePerSessionThreads_); + } + if (graphOptions_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, getGraphOptions()); + } + if (operationTimeoutInMs_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(11, operationTimeoutInMs_); + } + for (int i = 0; i < sessionInterOpThreadPool_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(12, sessionInterOpThreadPool_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if 
(obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.ConfigProto)) { + return super.equals(obj); + } + org.tensorflow.framework.ConfigProto other = (org.tensorflow.framework.ConfigProto) obj; + + boolean result = true; + result = result && internalGetDeviceCount().equals( + other.internalGetDeviceCount()); + result = result && (getIntraOpParallelismThreads() + == other.getIntraOpParallelismThreads()); + result = result && (getInterOpParallelismThreads() + == other.getInterOpParallelismThreads()); + result = result && (getUsePerSessionThreads() + == other.getUsePerSessionThreads()); + result = result && getSessionInterOpThreadPoolList() + .equals(other.getSessionInterOpThreadPoolList()); + result = result && (getPlacementPeriod() + == other.getPlacementPeriod()); + result = result && getDeviceFiltersList() + .equals(other.getDeviceFiltersList()); + result = result && (hasGpuOptions() == other.hasGpuOptions()); + if (hasGpuOptions()) { + result = result && getGpuOptions() + .equals(other.getGpuOptions()); + } + result = result && (getAllowSoftPlacement() + == other.getAllowSoftPlacement()); + result = result && (getLogDevicePlacement() + == other.getLogDevicePlacement()); + result = result && (hasGraphOptions() == other.hasGraphOptions()); + if (hasGraphOptions()) { + result = result && getGraphOptions() + .equals(other.getGraphOptions()); + } + result = result && (getOperationTimeoutInMs() + == other.getOperationTimeoutInMs()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (!internalGetDeviceCount().getMap().isEmpty()) { + hash = (37 * hash) + DEVICE_COUNT_FIELD_NUMBER; + hash = (53 * hash) + internalGetDeviceCount().hashCode(); + } + hash = (37 * hash) + INTRA_OP_PARALLELISM_THREADS_FIELD_NUMBER; + hash = (53 * hash) + getIntraOpParallelismThreads(); + hash = (37 * hash) 
+ INTER_OP_PARALLELISM_THREADS_FIELD_NUMBER; + hash = (53 * hash) + getInterOpParallelismThreads(); + hash = (37 * hash) + USE_PER_SESSION_THREADS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getUsePerSessionThreads()); + if (getSessionInterOpThreadPoolCount() > 0) { + hash = (37 * hash) + SESSION_INTER_OP_THREAD_POOL_FIELD_NUMBER; + hash = (53 * hash) + getSessionInterOpThreadPoolList().hashCode(); + } + hash = (37 * hash) + PLACEMENT_PERIOD_FIELD_NUMBER; + hash = (53 * hash) + getPlacementPeriod(); + if (getDeviceFiltersCount() > 0) { + hash = (37 * hash) + DEVICE_FILTERS_FIELD_NUMBER; + hash = (53 * hash) + getDeviceFiltersList().hashCode(); + } + if (hasGpuOptions()) { + hash = (37 * hash) + GPU_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getGpuOptions().hashCode(); + } + hash = (37 * hash) + ALLOW_SOFT_PLACEMENT_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getAllowSoftPlacement()); + hash = (37 * hash) + LOG_DEVICE_PLACEMENT_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getLogDevicePlacement()); + if (hasGraphOptions()) { + hash = (37 * hash) + GRAPH_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getGraphOptions().hashCode(); + } + hash = (37 * hash) + OPERATION_TIMEOUT_IN_MS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getOperationTimeoutInMs()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.ConfigProto parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.ConfigProto parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + 
public static org.tensorflow.framework.ConfigProto parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.ConfigProto parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.ConfigProto parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.ConfigProto parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.ConfigProto parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.ConfigProto parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.ConfigProto parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.ConfigProto parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.ConfigProto prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Session configuration parameters.
+   * The system picks appropriate values for fields that are not set.
+   * 
+ * + * Protobuf type {@code tensorflow.ConfigProto} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.ConfigProto) + org.tensorflow.framework.ConfigProtoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_ConfigProto_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetDeviceCount(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 1: + return internalGetMutableDeviceCount(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_ConfigProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.ConfigProto.class, org.tensorflow.framework.ConfigProto.Builder.class); + } + + // Construct using org.tensorflow.framework.ConfigProto.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getSessionInterOpThreadPoolFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + internalGetMutableDeviceCount().clear(); + intraOpParallelismThreads_ = 0; + + interOpParallelismThreads_ = 0; + + 
usePerSessionThreads_ = false; + + if (sessionInterOpThreadPoolBuilder_ == null) { + sessionInterOpThreadPool_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + sessionInterOpThreadPoolBuilder_.clear(); + } + placementPeriod_ = 0; + + deviceFilters_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000040); + if (gpuOptionsBuilder_ == null) { + gpuOptions_ = null; + } else { + gpuOptions_ = null; + gpuOptionsBuilder_ = null; + } + allowSoftPlacement_ = false; + + logDevicePlacement_ = false; + + if (graphOptionsBuilder_ == null) { + graphOptions_ = null; + } else { + graphOptions_ = null; + graphOptionsBuilder_ = null; + } + operationTimeoutInMs_ = 0L; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_ConfigProto_descriptor; + } + + public org.tensorflow.framework.ConfigProto getDefaultInstanceForType() { + return org.tensorflow.framework.ConfigProto.getDefaultInstance(); + } + + public org.tensorflow.framework.ConfigProto build() { + org.tensorflow.framework.ConfigProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.ConfigProto buildPartial() { + org.tensorflow.framework.ConfigProto result = new org.tensorflow.framework.ConfigProto(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.deviceCount_ = internalGetDeviceCount(); + result.deviceCount_.makeImmutable(); + result.intraOpParallelismThreads_ = intraOpParallelismThreads_; + result.interOpParallelismThreads_ = interOpParallelismThreads_; + result.usePerSessionThreads_ = usePerSessionThreads_; + if (sessionInterOpThreadPoolBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + sessionInterOpThreadPool_ = 
java.util.Collections.unmodifiableList(sessionInterOpThreadPool_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.sessionInterOpThreadPool_ = sessionInterOpThreadPool_; + } else { + result.sessionInterOpThreadPool_ = sessionInterOpThreadPoolBuilder_.build(); + } + result.placementPeriod_ = placementPeriod_; + if (((bitField0_ & 0x00000040) == 0x00000040)) { + deviceFilters_ = deviceFilters_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.deviceFilters_ = deviceFilters_; + if (gpuOptionsBuilder_ == null) { + result.gpuOptions_ = gpuOptions_; + } else { + result.gpuOptions_ = gpuOptionsBuilder_.build(); + } + result.allowSoftPlacement_ = allowSoftPlacement_; + result.logDevicePlacement_ = logDevicePlacement_; + if (graphOptionsBuilder_ == null) { + result.graphOptions_ = graphOptions_; + } else { + result.graphOptions_ = graphOptionsBuilder_.build(); + } + result.operationTimeoutInMs_ = operationTimeoutInMs_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
org.tensorflow.framework.ConfigProto) { + return mergeFrom((org.tensorflow.framework.ConfigProto)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.ConfigProto other) { + if (other == org.tensorflow.framework.ConfigProto.getDefaultInstance()) return this; + internalGetMutableDeviceCount().mergeFrom( + other.internalGetDeviceCount()); + if (other.getIntraOpParallelismThreads() != 0) { + setIntraOpParallelismThreads(other.getIntraOpParallelismThreads()); + } + if (other.getInterOpParallelismThreads() != 0) { + setInterOpParallelismThreads(other.getInterOpParallelismThreads()); + } + if (other.getUsePerSessionThreads() != false) { + setUsePerSessionThreads(other.getUsePerSessionThreads()); + } + if (sessionInterOpThreadPoolBuilder_ == null) { + if (!other.sessionInterOpThreadPool_.isEmpty()) { + if (sessionInterOpThreadPool_.isEmpty()) { + sessionInterOpThreadPool_ = other.sessionInterOpThreadPool_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.addAll(other.sessionInterOpThreadPool_); + } + onChanged(); + } + } else { + if (!other.sessionInterOpThreadPool_.isEmpty()) { + if (sessionInterOpThreadPoolBuilder_.isEmpty()) { + sessionInterOpThreadPoolBuilder_.dispose(); + sessionInterOpThreadPoolBuilder_ = null; + sessionInterOpThreadPool_ = other.sessionInterOpThreadPool_; + bitField0_ = (bitField0_ & ~0x00000010); + sessionInterOpThreadPoolBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getSessionInterOpThreadPoolFieldBuilder() : null; + } else { + sessionInterOpThreadPoolBuilder_.addAllMessages(other.sessionInterOpThreadPool_); + } + } + } + if (other.getPlacementPeriod() != 0) { + setPlacementPeriod(other.getPlacementPeriod()); + } + if (!other.deviceFilters_.isEmpty()) { + if (deviceFilters_.isEmpty()) { + deviceFilters_ = other.deviceFilters_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureDeviceFiltersIsMutable(); + deviceFilters_.addAll(other.deviceFilters_); + } + onChanged(); + } + if (other.hasGpuOptions()) { + mergeGpuOptions(other.getGpuOptions()); + } + if (other.getAllowSoftPlacement() != false) { + setAllowSoftPlacement(other.getAllowSoftPlacement()); + } + if (other.getLogDevicePlacement() != false) { + setLogDevicePlacement(other.getLogDevicePlacement()); + } + if (other.hasGraphOptions()) { + mergeGraphOptions(other.getGraphOptions()); + } + if (other.getOperationTimeoutInMs() != 0L) { + setOperationTimeoutInMs(other.getOperationTimeoutInMs()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.ConfigProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.ConfigProto) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.MapField< + java.lang.String, java.lang.Integer> deviceCount_; + private com.google.protobuf.MapField + internalGetDeviceCount() { + if (deviceCount_ == null) { + return com.google.protobuf.MapField.emptyMapField( + 
DeviceCountDefaultEntryHolder.defaultEntry); + } + return deviceCount_; + } + private com.google.protobuf.MapField + internalGetMutableDeviceCount() { + onChanged();; + if (deviceCount_ == null) { + deviceCount_ = com.google.protobuf.MapField.newMapField( + DeviceCountDefaultEntryHolder.defaultEntry); + } + if (!deviceCount_.isMutable()) { + deviceCount_ = deviceCount_.copy(); + } + return deviceCount_; + } + + public int getDeviceCountCount() { + return internalGetDeviceCount().getMap().size(); + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + + public boolean containsDeviceCount( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetDeviceCount().getMap().containsKey(key); + } + /** + * Use {@link #getDeviceCountMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getDeviceCount() { + return getDeviceCountMap(); + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + + public java.util.Map getDeviceCountMap() { + return internalGetDeviceCount().getMap(); + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + + public int getDeviceCountOrDefault( + java.lang.String key, + int defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetDeviceCount().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + + public int getDeviceCountOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetDeviceCount().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearDeviceCount() { + getMutableDeviceCount().clear(); + return this; + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + + public Builder removeDeviceCount( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableDeviceCount().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableDeviceCount() { + return internalGetMutableDeviceCount().getMutableMap(); + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + public Builder putDeviceCount( + java.lang.String key, + int value) { + if (key == null) { throw new java.lang.NullPointerException(); } + + getMutableDeviceCount().put(key, value); + return this; + } + /** + *
+     * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+     * number of devices of that type to use.  If a particular device
+     * type is not found in the map, the system picks an appropriate
+     * number.
+     * 
+ * + * map<string, int32> device_count = 1; + */ + + public Builder putAllDeviceCount( + java.util.Map values) { + getMutableDeviceCount().putAll(values); + return this; + } + + private int intraOpParallelismThreads_ ; + /** + *
+     * The execution of an individual op (for some op types) can be
+     * parallelized on a pool of intra_op_parallelism_threads.
+     * 0 means the system picks an appropriate number.
+     * 
+ * + * optional int32 intra_op_parallelism_threads = 2; + */ + public int getIntraOpParallelismThreads() { + return intraOpParallelismThreads_; + } + /** + *
+     * The execution of an individual op (for some op types) can be
+     * parallelized on a pool of intra_op_parallelism_threads.
+     * 0 means the system picks an appropriate number.
+     * 
+ * + * optional int32 intra_op_parallelism_threads = 2; + */ + public Builder setIntraOpParallelismThreads(int value) { + + intraOpParallelismThreads_ = value; + onChanged(); + return this; + } + /** + *
+     * The execution of an individual op (for some op types) can be
+     * parallelized on a pool of intra_op_parallelism_threads.
+     * 0 means the system picks an appropriate number.
+     * 
+ * + * optional int32 intra_op_parallelism_threads = 2; + */ + public Builder clearIntraOpParallelismThreads() { + + intraOpParallelismThreads_ = 0; + onChanged(); + return this; + } + + private int interOpParallelismThreads_ ; + /** + *
+     * Nodes that perform blocking operations are enqueued on a pool of
+     * inter_op_parallelism_threads available in each process.
+     * 0 means the system picks an appropriate number.
+     * Note that the first Session created in the process sets the
+     * number of threads for all future sessions unless use_per_session_threads is
+     * true or session_inter_op_thread_pool is configured.
+     * 
+ * + * optional int32 inter_op_parallelism_threads = 5; + */ + public int getInterOpParallelismThreads() { + return interOpParallelismThreads_; + } + /** + *
+     * Nodes that perform blocking operations are enqueued on a pool of
+     * inter_op_parallelism_threads available in each process.
+     * 0 means the system picks an appropriate number.
+     * Note that the first Session created in the process sets the
+     * number of threads for all future sessions unless use_per_session_threads is
+     * true or session_inter_op_thread_pool is configured.
+     * 
+ * + * optional int32 inter_op_parallelism_threads = 5; + */ + public Builder setInterOpParallelismThreads(int value) { + + interOpParallelismThreads_ = value; + onChanged(); + return this; + } + /** + *
+     * Nodes that perform blocking operations are enqueued on a pool of
+     * inter_op_parallelism_threads available in each process.
+     * 0 means the system picks an appropriate number.
+     * Note that the first Session created in the process sets the
+     * number of threads for all future sessions unless use_per_session_threads is
+     * true or session_inter_op_thread_pool is configured.
+     * 
+ * + * optional int32 inter_op_parallelism_threads = 5; + */ + public Builder clearInterOpParallelismThreads() { + + interOpParallelismThreads_ = 0; + onChanged(); + return this; + } + + private boolean usePerSessionThreads_ ; + /** + *
+     * If true, use a new set of threads for this session rather than the global
+     * pool of threads. Only supported by direct sessions.
+     * If false, use the global threads created by the first session, or the
+     * per-session thread pools configured by session_inter_op_thread_pool.
+     * This option is deprecated. The same effect can be achieved by setting
+     * session_inter_op_thread_pool to have one element, whose num_threads equals
+     * inter_op_parallelism_threads.
+     * 
+ * + * optional bool use_per_session_threads = 9; + */ + public boolean getUsePerSessionThreads() { + return usePerSessionThreads_; + } + /** + *
+     * If true, use a new set of threads for this session rather than the global
+     * pool of threads. Only supported by direct sessions.
+     * If false, use the global threads created by the first session, or the
+     * per-session thread pools configured by session_inter_op_thread_pool.
+     * This option is deprecated. The same effect can be achieved by setting
+     * session_inter_op_thread_pool to have one element, whose num_threads equals
+     * inter_op_parallelism_threads.
+     * 
+ * + * optional bool use_per_session_threads = 9; + */ + public Builder setUsePerSessionThreads(boolean value) { + + usePerSessionThreads_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, use a new set of threads for this session rather than the global
+     * pool of threads. Only supported by direct sessions.
+     * If false, use the global threads created by the first session, or the
+     * per-session thread pools configured by session_inter_op_thread_pool.
+     * This option is deprecated. The same effect can be achieved by setting
+     * session_inter_op_thread_pool to have one element, whose num_threads equals
+     * inter_op_parallelism_threads.
+     * 
+ * + * optional bool use_per_session_threads = 9; + */ + public Builder clearUsePerSessionThreads() { + + usePerSessionThreads_ = false; + onChanged(); + return this; + } + + private java.util.List sessionInterOpThreadPool_ = + java.util.Collections.emptyList(); + private void ensureSessionInterOpThreadPoolIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + sessionInterOpThreadPool_ = new java.util.ArrayList(sessionInterOpThreadPool_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.ThreadPoolOptionProto, org.tensorflow.framework.ThreadPoolOptionProto.Builder, org.tensorflow.framework.ThreadPoolOptionProtoOrBuilder> sessionInterOpThreadPoolBuilder_; + + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public java.util.List getSessionInterOpThreadPoolList() { + if (sessionInterOpThreadPoolBuilder_ == null) { + return java.util.Collections.unmodifiableList(sessionInterOpThreadPool_); + } else { + return sessionInterOpThreadPoolBuilder_.getMessageList(); + } + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public int getSessionInterOpThreadPoolCount() { + if (sessionInterOpThreadPoolBuilder_ == null) { + return sessionInterOpThreadPool_.size(); + } else { + return sessionInterOpThreadPoolBuilder_.getCount(); + } + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProto getSessionInterOpThreadPool(int index) { + if (sessionInterOpThreadPoolBuilder_ == null) { + return sessionInterOpThreadPool_.get(index); + } else { + return sessionInterOpThreadPoolBuilder_.getMessage(index); + } + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder setSessionInterOpThreadPool( + int index, org.tensorflow.framework.ThreadPoolOptionProto value) { + if (sessionInterOpThreadPoolBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.set(index, value); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder setSessionInterOpThreadPool( + int index, org.tensorflow.framework.ThreadPoolOptionProto.Builder builderForValue) { + if (sessionInterOpThreadPoolBuilder_ == null) { + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.set(index, builderForValue.build()); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder addSessionInterOpThreadPool(org.tensorflow.framework.ThreadPoolOptionProto value) { + if (sessionInterOpThreadPoolBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.add(value); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder addSessionInterOpThreadPool( + int index, org.tensorflow.framework.ThreadPoolOptionProto value) { + if (sessionInterOpThreadPoolBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.add(index, value); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder addSessionInterOpThreadPool( + org.tensorflow.framework.ThreadPoolOptionProto.Builder builderForValue) { + if (sessionInterOpThreadPoolBuilder_ == null) { + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.add(builderForValue.build()); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder addSessionInterOpThreadPool( + int index, org.tensorflow.framework.ThreadPoolOptionProto.Builder builderForValue) { + if (sessionInterOpThreadPoolBuilder_ == null) { + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.add(index, builderForValue.build()); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder addAllSessionInterOpThreadPool( + java.lang.Iterable values) { + if (sessionInterOpThreadPoolBuilder_ == null) { + ensureSessionInterOpThreadPoolIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, sessionInterOpThreadPool_); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder clearSessionInterOpThreadPool() { + if (sessionInterOpThreadPoolBuilder_ == null) { + sessionInterOpThreadPool_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.clear(); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public Builder removeSessionInterOpThreadPool(int index) { + if (sessionInterOpThreadPoolBuilder_ == null) { + ensureSessionInterOpThreadPoolIsMutable(); + sessionInterOpThreadPool_.remove(index); + onChanged(); + } else { + sessionInterOpThreadPoolBuilder_.remove(index); + } + return this; + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProto.Builder getSessionInterOpThreadPoolBuilder( + int index) { + return getSessionInterOpThreadPoolFieldBuilder().getBuilder(index); + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProtoOrBuilder getSessionInterOpThreadPoolOrBuilder( + int index) { + if (sessionInterOpThreadPoolBuilder_ == null) { + return sessionInterOpThreadPool_.get(index); } else { + return sessionInterOpThreadPoolBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public java.util.List + getSessionInterOpThreadPoolOrBuilderList() { + if (sessionInterOpThreadPoolBuilder_ != null) { + return sessionInterOpThreadPoolBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(sessionInterOpThreadPool_); + } + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProto.Builder addSessionInterOpThreadPoolBuilder() { + return getSessionInterOpThreadPoolFieldBuilder().addBuilder( + org.tensorflow.framework.ThreadPoolOptionProto.getDefaultInstance()); + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public org.tensorflow.framework.ThreadPoolOptionProto.Builder addSessionInterOpThreadPoolBuilder( + int index) { + return getSessionInterOpThreadPoolFieldBuilder().addBuilder( + index, org.tensorflow.framework.ThreadPoolOptionProto.getDefaultInstance()); + } + /** + *
+     * This option is experimental - it may be replaced with a different mechanism
+     * in the future. The intended use is for when some session invocations need
+     * to run in a background pool limited to a small number of threads.
+     * Configures session thread pools. If this is configured, then RunOptions for
+     * a Run call can select the thread pool to use.
+     * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+     * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + public java.util.List + getSessionInterOpThreadPoolBuilderList() { + return getSessionInterOpThreadPoolFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.ThreadPoolOptionProto, org.tensorflow.framework.ThreadPoolOptionProto.Builder, org.tensorflow.framework.ThreadPoolOptionProtoOrBuilder> + getSessionInterOpThreadPoolFieldBuilder() { + if (sessionInterOpThreadPoolBuilder_ == null) { + sessionInterOpThreadPoolBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.ThreadPoolOptionProto, org.tensorflow.framework.ThreadPoolOptionProto.Builder, org.tensorflow.framework.ThreadPoolOptionProtoOrBuilder>( + sessionInterOpThreadPool_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + sessionInterOpThreadPool_ = null; + } + return sessionInterOpThreadPoolBuilder_; + } + + private int placementPeriod_ ; + /** + *
+     * Assignment of Nodes to Devices is recomputed every placement_period
+     * steps until the system warms up (at which point the recomputation
+     * typically slows down automatically).
+     * 
+ * + * optional int32 placement_period = 3; + */ + public int getPlacementPeriod() { + return placementPeriod_; + } + /** + *
+     * Assignment of Nodes to Devices is recomputed every placement_period
+     * steps until the system warms up (at which point the recomputation
+     * typically slows down automatically).
+     * 
+ * + * optional int32 placement_period = 3; + */ + public Builder setPlacementPeriod(int value) { + + placementPeriod_ = value; + onChanged(); + return this; + } + /** + *
+     * Assignment of Nodes to Devices is recomputed every placement_period
+     * steps until the system warms up (at which point the recomputation
+     * typically slows down automatically).
+     * 
+ * + * optional int32 placement_period = 3; + */ + public Builder clearPlacementPeriod() { + + placementPeriod_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList deviceFilters_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureDeviceFiltersIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + deviceFilters_ = new com.google.protobuf.LazyStringArrayList(deviceFilters_); + bitField0_ |= 0x00000040; + } + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public com.google.protobuf.ProtocolStringList + getDeviceFiltersList() { + return deviceFilters_.getUnmodifiableView(); + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public int getDeviceFiltersCount() { + return deviceFilters_.size(); + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public java.lang.String getDeviceFilters(int index) { + return deviceFilters_.get(index); + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public com.google.protobuf.ByteString + getDeviceFiltersBytes(int index) { + return deviceFilters_.getByteString(index); + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public Builder setDeviceFilters( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDeviceFiltersIsMutable(); + deviceFilters_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public Builder addDeviceFilters( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDeviceFiltersIsMutable(); + deviceFilters_.add(value); + onChanged(); + return this; + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public Builder addAllDeviceFilters( + java.lang.Iterable values) { + ensureDeviceFiltersIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, deviceFilters_); + onChanged(); + return this; + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public Builder clearDeviceFilters() { + deviceFilters_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + *
+     * When any filters are present sessions will ignore all devices which do not
+     * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+     * "/job:worker/replica:3", etc.
+     * 
+ * + * repeated string device_filters = 4; + */ + public Builder addDeviceFiltersBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureDeviceFiltersIsMutable(); + deviceFilters_.add(value); + onChanged(); + return this; + } + + private org.tensorflow.framework.GPUOptions gpuOptions_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.GPUOptions, org.tensorflow.framework.GPUOptions.Builder, org.tensorflow.framework.GPUOptionsOrBuilder> gpuOptionsBuilder_; + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public boolean hasGpuOptions() { + return gpuOptionsBuilder_ != null || gpuOptions_ != null; + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public org.tensorflow.framework.GPUOptions getGpuOptions() { + if (gpuOptionsBuilder_ == null) { + return gpuOptions_ == null ? org.tensorflow.framework.GPUOptions.getDefaultInstance() : gpuOptions_; + } else { + return gpuOptionsBuilder_.getMessage(); + } + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public Builder setGpuOptions(org.tensorflow.framework.GPUOptions value) { + if (gpuOptionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + gpuOptions_ = value; + onChanged(); + } else { + gpuOptionsBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public Builder setGpuOptions( + org.tensorflow.framework.GPUOptions.Builder builderForValue) { + if (gpuOptionsBuilder_ == null) { + gpuOptions_ = builderForValue.build(); + onChanged(); + } else { + gpuOptionsBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public Builder mergeGpuOptions(org.tensorflow.framework.GPUOptions value) { + if (gpuOptionsBuilder_ == null) { + if (gpuOptions_ != null) { + gpuOptions_ = + org.tensorflow.framework.GPUOptions.newBuilder(gpuOptions_).mergeFrom(value).buildPartial(); + } else { + gpuOptions_ = value; + } + onChanged(); + } else { + gpuOptionsBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public Builder clearGpuOptions() { + if (gpuOptionsBuilder_ == null) { + gpuOptions_ = null; + onChanged(); + } else { + gpuOptions_ = null; + gpuOptionsBuilder_ = null; + } + + return this; + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public org.tensorflow.framework.GPUOptions.Builder getGpuOptionsBuilder() { + + onChanged(); + return getGpuOptionsFieldBuilder().getBuilder(); + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + public org.tensorflow.framework.GPUOptionsOrBuilder getGpuOptionsOrBuilder() { + if (gpuOptionsBuilder_ != null) { + return gpuOptionsBuilder_.getMessageOrBuilder(); + } else { + return gpuOptions_ == null ? + org.tensorflow.framework.GPUOptions.getDefaultInstance() : gpuOptions_; + } + } + /** + *
+     * Options that apply to all GPUs.
+     * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.GPUOptions, org.tensorflow.framework.GPUOptions.Builder, org.tensorflow.framework.GPUOptionsOrBuilder> + getGpuOptionsFieldBuilder() { + if (gpuOptionsBuilder_ == null) { + gpuOptionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.GPUOptions, org.tensorflow.framework.GPUOptions.Builder, org.tensorflow.framework.GPUOptionsOrBuilder>( + getGpuOptions(), + getParentForChildren(), + isClean()); + gpuOptions_ = null; + } + return gpuOptionsBuilder_; + } + + private boolean allowSoftPlacement_ ; + /** + *
+     * Whether soft placement is allowed. If allow_soft_placement is true,
+     * an op will be placed on CPU if
+     *   1. there's no GPU implementation for the OP
+     * or
+     *   2. no GPU devices are known or registered
+     * or
+     *   3. need to co-locate with reftype input(s) which are from CPU.
+     * 
+ * + * optional bool allow_soft_placement = 7; + */ + public boolean getAllowSoftPlacement() { + return allowSoftPlacement_; + } + /** + *
+     * Whether soft placement is allowed. If allow_soft_placement is true,
+     * an op will be placed on CPU if
+     *   1. there's no GPU implementation for the OP
+     * or
+     *   2. no GPU devices are known or registered
+     * or
+     *   3. need to co-locate with reftype input(s) which are from CPU.
+     * 
+ * + * optional bool allow_soft_placement = 7; + */ + public Builder setAllowSoftPlacement(boolean value) { + + allowSoftPlacement_ = value; + onChanged(); + return this; + } + /** + *
+     * Whether soft placement is allowed. If allow_soft_placement is true,
+     * an op will be placed on CPU if
+     *   1. there's no GPU implementation for the OP
+     * or
+     *   2. no GPU devices are known or registered
+     * or
+     *   3. need to co-locate with reftype input(s) which are from CPU.
+     * 
+ * + * optional bool allow_soft_placement = 7; + */ + public Builder clearAllowSoftPlacement() { + + allowSoftPlacement_ = false; + onChanged(); + return this; + } + + private boolean logDevicePlacement_ ; + /** + *
+     * Whether device placements should be logged.
+     * 
+ * + * optional bool log_device_placement = 8; + */ + public boolean getLogDevicePlacement() { + return logDevicePlacement_; + } + /** + *
+     * Whether device placements should be logged.
+     * 
+ * + * optional bool log_device_placement = 8; + */ + public Builder setLogDevicePlacement(boolean value) { + + logDevicePlacement_ = value; + onChanged(); + return this; + } + /** + *
+     * Whether device placements should be logged.
+     * 
+ * + * optional bool log_device_placement = 8; + */ + public Builder clearLogDevicePlacement() { + + logDevicePlacement_ = false; + onChanged(); + return this; + } + + private org.tensorflow.framework.GraphOptions graphOptions_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.GraphOptions, org.tensorflow.framework.GraphOptions.Builder, org.tensorflow.framework.GraphOptionsOrBuilder> graphOptionsBuilder_; + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public boolean hasGraphOptions() { + return graphOptionsBuilder_ != null || graphOptions_ != null; + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public org.tensorflow.framework.GraphOptions getGraphOptions() { + if (graphOptionsBuilder_ == null) { + return graphOptions_ == null ? org.tensorflow.framework.GraphOptions.getDefaultInstance() : graphOptions_; + } else { + return graphOptionsBuilder_.getMessage(); + } + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public Builder setGraphOptions(org.tensorflow.framework.GraphOptions value) { + if (graphOptionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + graphOptions_ = value; + onChanged(); + } else { + graphOptionsBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public Builder setGraphOptions( + org.tensorflow.framework.GraphOptions.Builder builderForValue) { + if (graphOptionsBuilder_ == null) { + graphOptions_ = builderForValue.build(); + onChanged(); + } else { + graphOptionsBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public Builder mergeGraphOptions(org.tensorflow.framework.GraphOptions value) { + if (graphOptionsBuilder_ == null) { + if (graphOptions_ != null) { + graphOptions_ = + org.tensorflow.framework.GraphOptions.newBuilder(graphOptions_).mergeFrom(value).buildPartial(); + } else { + graphOptions_ = value; + } + onChanged(); + } else { + graphOptionsBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public Builder clearGraphOptions() { + if (graphOptionsBuilder_ == null) { + graphOptions_ = null; + onChanged(); + } else { + graphOptions_ = null; + graphOptionsBuilder_ = null; + } + + return this; + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public org.tensorflow.framework.GraphOptions.Builder getGraphOptionsBuilder() { + + onChanged(); + return getGraphOptionsFieldBuilder().getBuilder(); + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + public org.tensorflow.framework.GraphOptionsOrBuilder getGraphOptionsOrBuilder() { + if (graphOptionsBuilder_ != null) { + return graphOptionsBuilder_.getMessageOrBuilder(); + } else { + return graphOptions_ == null ? + org.tensorflow.framework.GraphOptions.getDefaultInstance() : graphOptions_; + } + } + /** + *
+     * Options that apply to all graphs.
+     * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.GraphOptions, org.tensorflow.framework.GraphOptions.Builder, org.tensorflow.framework.GraphOptionsOrBuilder> + getGraphOptionsFieldBuilder() { + if (graphOptionsBuilder_ == null) { + graphOptionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.GraphOptions, org.tensorflow.framework.GraphOptions.Builder, org.tensorflow.framework.GraphOptionsOrBuilder>( + getGraphOptions(), + getParentForChildren(), + isClean()); + graphOptions_ = null; + } + return graphOptionsBuilder_; + } + + private long operationTimeoutInMs_ ; + /** + *
+     * Global timeout for all blocking operations in this session.  If non-zero,
+     * and not overridden on a per-operation basis, this value will be used as the
+     * deadline for all blocking operations.
+     * 
+ * + * optional int64 operation_timeout_in_ms = 11; + */ + public long getOperationTimeoutInMs() { + return operationTimeoutInMs_; + } + /** + *
+     * Global timeout for all blocking operations in this session.  If non-zero,
+     * and not overridden on a per-operation basis, this value will be used as the
+     * deadline for all blocking operations.
+     * 
+ * + * optional int64 operation_timeout_in_ms = 11; + */ + public Builder setOperationTimeoutInMs(long value) { + + operationTimeoutInMs_ = value; + onChanged(); + return this; + } + /** + *
+     * Global timeout for all blocking operations in this session.  If non-zero,
+     * and not overridden on a per-operation basis, this value will be used as the
+     * deadline for all blocking operations.
+     * 
+ * + * optional int64 operation_timeout_in_ms = 11; + */ + public Builder clearOperationTimeoutInMs() { + + operationTimeoutInMs_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.ConfigProto) + } + + // @@protoc_insertion_point(class_scope:tensorflow.ConfigProto) + private static final org.tensorflow.framework.ConfigProto DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.ConfigProto(); + } + + public static org.tensorflow.framework.ConfigProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ConfigProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ConfigProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.ConfigProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtoOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtoOrBuilder.java new file mode 100644 index 0000000000000..66dec2affbd26 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtoOrBuilder.java @@ -0,0 +1,326 @@ +// Generated by the protocol buffer compiler. 
DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface ConfigProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.ConfigProto) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + int getDeviceCountCount(); + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + boolean containsDeviceCount( + java.lang.String key); + /** + * Use {@link #getDeviceCountMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getDeviceCount(); + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + java.util.Map + getDeviceCountMap(); + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + + int getDeviceCountOrDefault( + java.lang.String key, + int defaultValue); + /** + *
+   * Map from device type name (e.g., "CPU" or "GPU" ) to maximum
+   * number of devices of that type to use.  If a particular device
+   * type is not found in the map, the system picks an appropriate
+   * number.
+   * 
+ * + * map<string, int32> device_count = 1; + */ + + int getDeviceCountOrThrow( + java.lang.String key); + + /** + *
+   * The execution of an individual op (for some op types) can be
+   * parallelized on a pool of intra_op_parallelism_threads.
+   * 0 means the system picks an appropriate number.
+   * 
+ * + * optional int32 intra_op_parallelism_threads = 2; + */ + int getIntraOpParallelismThreads(); + + /** + *
+   * Nodes that perform blocking operations are enqueued on a pool of
+   * inter_op_parallelism_threads available in each process.
+   * 0 means the system picks an appropriate number.
+   * Note that the first Session created in the process sets the
+   * number of threads for all future sessions unless use_per_session_threads is
+   * true or session_inter_op_thread_pool is configured.
+   * 
+ * + * optional int32 inter_op_parallelism_threads = 5; + */ + int getInterOpParallelismThreads(); + + /** + *
+   * If true, use a new set of threads for this session rather than the global
+   * pool of threads. Only supported by direct sessions.
+   * If false, use the global threads created by the first session, or the
+   * per-session thread pools configured by session_inter_op_thread_pool.
+   * This option is deprecated. The same effect can be achieved by setting
+   * session_inter_op_thread_pool to have one element, whose num_threads equals
+   * inter_op_parallelism_threads.
+   * 
+ * + * optional bool use_per_session_threads = 9; + */ + boolean getUsePerSessionThreads(); + + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + java.util.List + getSessionInterOpThreadPoolList(); + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + org.tensorflow.framework.ThreadPoolOptionProto getSessionInterOpThreadPool(int index); + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + int getSessionInterOpThreadPoolCount(); + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + java.util.List + getSessionInterOpThreadPoolOrBuilderList(); + /** + *
+   * This option is experimental - it may be replaced with a different mechanism
+   * in the future. The intended use is for when some session invocations need
+   * to run in a background pool limited to a small number of threads.
+   * Configures session thread pools. If this is configured, then RunOptions for
+   * a Run call can select the thread pool to use.
+   * If a pool's num_threads is 0, then inter_op_parallelism_threads is used.
+   * 
+ * + * repeated .tensorflow.ThreadPoolOptionProto session_inter_op_thread_pool = 12; + */ + org.tensorflow.framework.ThreadPoolOptionProtoOrBuilder getSessionInterOpThreadPoolOrBuilder( + int index); + + /** + *
+   * Assignment of Nodes to Devices is recomputed every placement_period
+   * steps until the system warms up (at which point the recomputation
+   * typically slows down automatically).
+   * 
+ * + * optional int32 placement_period = 3; + */ + int getPlacementPeriod(); + + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + java.util.List + getDeviceFiltersList(); + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + int getDeviceFiltersCount(); + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + java.lang.String getDeviceFilters(int index); + /** + *
+   * When any filters are present sessions will ignore all devices which do not
+   * match the filters. Each filter can be partially specified, e.g. "/job:ps"
+   * "/job:worker/replica:3", etc.
+   * 
+ * + * repeated string device_filters = 4; + */ + com.google.protobuf.ByteString + getDeviceFiltersBytes(int index); + + /** + *
+   * Options that apply to all GPUs.
+   * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + boolean hasGpuOptions(); + /** + *
+   * Options that apply to all GPUs.
+   * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + org.tensorflow.framework.GPUOptions getGpuOptions(); + /** + *
+   * Options that apply to all GPUs.
+   * 
+ * + * optional .tensorflow.GPUOptions gpu_options = 6; + */ + org.tensorflow.framework.GPUOptionsOrBuilder getGpuOptionsOrBuilder(); + + /** + *
+   * Whether soft placement is allowed. If allow_soft_placement is true,
+   * an op will be placed on CPU if
+   *   1. there's no GPU implementation for the OP
+   * or
+   *   2. no GPU devices are known or registered
+   * or
+   *   3. need to co-locate with reftype input(s) which are from CPU.
+   * 
+ * + * optional bool allow_soft_placement = 7; + */ + boolean getAllowSoftPlacement(); + + /** + *
+   * Whether device placements should be logged.
+   * 
+ * + * optional bool log_device_placement = 8; + */ + boolean getLogDevicePlacement(); + + /** + *
+   * Options that apply to all graphs.
+   * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + boolean hasGraphOptions(); + /** + *
+   * Options that apply to all graphs.
+   * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + org.tensorflow.framework.GraphOptions getGraphOptions(); + /** + *
+   * Options that apply to all graphs.
+   * 
+ * + * optional .tensorflow.GraphOptions graph_options = 10; + */ + org.tensorflow.framework.GraphOptionsOrBuilder getGraphOptionsOrBuilder(); + + /** + *
+   * Global timeout for all blocking operations in this session.  If non-zero,
+   * and not overridden on a per-operation basis, this value will be used as the
+   * deadline for all blocking operations.
+   * 
+ * + * optional int64 operation_timeout_in_ms = 11; + */ + long getOperationTimeoutInMs(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtos.java new file mode 100644 index 0000000000000..a1977775be4ae --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ConfigProtos.java @@ -0,0 +1,203 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public final class ConfigProtos { + private ConfigProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_GPUOptions_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_GPUOptions_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_OptimizerOptions_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_OptimizerOptions_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_GraphOptions_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_GraphOptions_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_ThreadPoolOptionProto_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_tensorflow_ThreadPoolOptionProto_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_ConfigProto_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_ConfigProto_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_ConfigProto_DeviceCountEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_ConfigProto_DeviceCountEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_DebugTensorWatch_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_DebugTensorWatch_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_RunOptions_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_RunOptions_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_RunMetadata_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_RunMetadata_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n%tensorflow/core/protobuf/config.proto\022" + + "\ntensorflow\032*tensorflow/core/framework/c" + + "ost_graph.proto\032%tensorflow/core/framewo" + + "rk/graph.proto\032*tensorflow/core/framewor" + + "k/step_stats.proto\"\241\001\n\nGPUOptions\022\'\n\037per" + + "_process_gpu_memory_fraction\030\001 \001(\001\022\026\n\016al" + + "locator_type\030\002 \001(\t\022\037\n\027deferred_deletion_" + + 
"bytes\030\003 \001(\003\022\024\n\014allow_growth\030\004 \001(\010\022\033\n\023vis" + + "ible_device_list\030\005 \001(\t\"\337\002\n\020OptimizerOpti" + + "ons\022+\n#do_common_subexpression_eliminati", + "on\030\001 \001(\010\022\033\n\023do_constant_folding\030\002 \001(\010\022\034\n" + + "\024do_function_inlining\030\004 \001(\010\0225\n\topt_level" + + "\030\003 \001(\0162\".tensorflow.OptimizerOptions.Lev" + + "el\022E\n\020global_jit_level\030\005 \001(\0162+.tensorflo" + + "w.OptimizerOptions.GlobalJitLevel\" \n\005Lev" + + "el\022\006\n\002L1\020\000\022\017\n\002L0\020\377\377\377\377\377\377\377\377\377\001\"C\n\016GlobalJit" + + "Level\022\013\n\007DEFAULT\020\000\022\020\n\003OFF\020\377\377\377\377\377\377\377\377\377\001\022\010\n\004" + + "ON_1\020\001\022\010\n\004ON_2\020\002\"\271\002\n\014GraphOptions\022\036\n\026ena" + + "ble_recv_scheduling\030\002 \001(\010\0227\n\021optimizer_o" + + "ptions\030\003 \001(\0132\034.tensorflow.OptimizerOptio", + "ns\022\030\n\020build_cost_model\030\004 \001(\003\022\036\n\026build_co" + + "st_model_after\030\t \001(\003\022\024\n\014infer_shapes\030\005 \001" + + "(\010\022\032\n\022place_pruned_graph\030\006 \001(\010\022 \n\030enable" + + "_bfloat16_sendrecv\030\007 \001(\010\022\025\n\rtimeline_ste" + + "p\030\010 \001(\005J\004\010\001\020\002R%skip_common_subexpression" + + "_elimination\",\n\025ThreadPoolOptionProto\022\023\n" + + "\013num_threads\030\001 \001(\005\"\244\004\n\013ConfigProto\022>\n\014de" + + "vice_count\030\001 \003(\0132(.tensorflow.ConfigProt" + + "o.DeviceCountEntry\022$\n\034intra_op_paralleli" + + "sm_threads\030\002 \001(\005\022$\n\034inter_op_parallelism", + "_threads\030\005 \001(\005\022\037\n\027use_per_session_thread" + + "s\030\t \001(\010\022G\n\034session_inter_op_thread_pool\030" + + "\014 \003(\0132!.tensorflow.ThreadPoolOptionProto" + + "\022\030\n\020placement_period\030\003 \001(\005\022\026\n\016device_fil" + + "ters\030\004 
\003(\t\022+\n\013gpu_options\030\006 \001(\0132\026.tensor" + + "flow.GPUOptions\022\034\n\024allow_soft_placement\030" + + "\007 \001(\010\022\034\n\024log_device_placement\030\010 \001(\010\022/\n\rg" + + "raph_options\030\n \001(\0132\030.tensorflow.GraphOpt" + + "ions\022\037\n\027operation_timeout_in_ms\030\013 \001(\003\0322\n" + + "\020DeviceCountEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030", + "\002 \001(\005:\0028\001\"a\n\020DebugTensorWatch\022\021\n\tnode_na" + + "me\030\001 \001(\t\022\023\n\013output_slot\030\002 \001(\005\022\021\n\tdebug_o" + + "ps\030\003 \003(\t\022\022\n\ndebug_urls\030\004 \003(\t\"\255\002\n\nRunOpti" + + "ons\0226\n\013trace_level\030\001 \001(\0162!.tensorflow.Ru" + + "nOptions.TraceLevel\022\025\n\rtimeout_in_ms\030\002 \001" + + "(\003\022\034\n\024inter_op_thread_pool\030\003 \001(\005\022=\n\027debu" + + "g_tensor_watch_opts\030\004 \003(\0132\034.tensorflow.D" + + "ebugTensorWatch\022\037\n\027output_partition_grap" + + "hs\030\005 \001(\010\"R\n\nTraceLevel\022\014\n\010NO_TRACE\020\000\022\022\n\016" + + "SOFTWARE_TRACE\020\001\022\022\n\016HARDWARE_TRACE\020\002\022\016\n\n", + "FULL_TRACE\020\003\"\226\001\n\013RunMetadata\022)\n\nstep_sta" + + "ts\030\001 \001(\0132\025.tensorflow.StepStats\022,\n\ncost_" + + "graph\030\002 \001(\0132\030.tensorflow.CostGraphDef\022.\n" + + "\020partition_graphs\030\003 \003(\0132\024.tensorflow.Gra" + + "phDefB-\n\030org.tensorflow.frameworkB\014Confi" + + "gProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.CostGraphProtos.getDescriptor(), + org.tensorflow.framework.GraphProtos.getDescriptor(), + org.tensorflow.framework.StepStatsProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_GPUOptions_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_GPUOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_GPUOptions_descriptor, + new java.lang.String[] { "PerProcessGpuMemoryFraction", "AllocatorType", "DeferredDeletionBytes", "AllowGrowth", "VisibleDeviceList", }); + internal_static_tensorflow_OptimizerOptions_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_OptimizerOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_OptimizerOptions_descriptor, + new java.lang.String[] { "DoCommonSubexpressionElimination", "DoConstantFolding", "DoFunctionInlining", "OptLevel", "GlobalJitLevel", }); + internal_static_tensorflow_GraphOptions_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_tensorflow_GraphOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_GraphOptions_descriptor, + new java.lang.String[] { "EnableRecvScheduling", "OptimizerOptions", "BuildCostModel", "BuildCostModelAfter", "InferShapes", "PlacePrunedGraph", "EnableBfloat16Sendrecv", "TimelineStep", }); + internal_static_tensorflow_ThreadPoolOptionProto_descriptor = + getDescriptor().getMessageTypes().get(3); + 
internal_static_tensorflow_ThreadPoolOptionProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_ThreadPoolOptionProto_descriptor, + new java.lang.String[] { "NumThreads", }); + internal_static_tensorflow_ConfigProto_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_tensorflow_ConfigProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_ConfigProto_descriptor, + new java.lang.String[] { "DeviceCount", "IntraOpParallelismThreads", "InterOpParallelismThreads", "UsePerSessionThreads", "SessionInterOpThreadPool", "PlacementPeriod", "DeviceFilters", "GpuOptions", "AllowSoftPlacement", "LogDevicePlacement", "GraphOptions", "OperationTimeoutInMs", }); + internal_static_tensorflow_ConfigProto_DeviceCountEntry_descriptor = + internal_static_tensorflow_ConfigProto_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_ConfigProto_DeviceCountEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_ConfigProto_DeviceCountEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_tensorflow_DebugTensorWatch_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_tensorflow_DebugTensorWatch_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_DebugTensorWatch_descriptor, + new java.lang.String[] { "NodeName", "OutputSlot", "DebugOps", "DebugUrls", }); + internal_static_tensorflow_RunOptions_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_tensorflow_RunOptions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_RunOptions_descriptor, + new java.lang.String[] { "TraceLevel", "TimeoutInMs", "InterOpThreadPool", "DebugTensorWatchOpts", "OutputPartitionGraphs", }); 
+ internal_static_tensorflow_RunMetadata_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_tensorflow_RunMetadata_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_RunMetadata_descriptor, + new java.lang.String[] { "StepStats", "CostGraph", "PartitionGraphs", }); + org.tensorflow.framework.CostGraphProtos.getDescriptor(); + org.tensorflow.framework.GraphProtos.getDescriptor(); + org.tensorflow.framework.StepStatsProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDef.java new file mode 100644 index 0000000000000..31ee6022906e4 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDef.java @@ -0,0 +1,4058 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/cost_graph.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.CostGraphDef} + */ +public final class CostGraphDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.CostGraphDef) + CostGraphDefOrBuilder { + // Use CostGraphDef.newBuilder() to construct. 
+ private CostGraphDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CostGraphDef() { + node_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private CostGraphDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + node_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + node_.add( + input.readMessage(org.tensorflow.framework.CostGraphDef.Node.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + node_ = java.util.Collections.unmodifiableList(node_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.class, 
org.tensorflow.framework.CostGraphDef.Builder.class); + } + + public interface NodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.CostGraphDef.Node) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The name of the node. Names are globally unique.
+     * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+     * The name of the node. Names are globally unique.
+     * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * The device of the node. Can be empty if the node is mapped to the
+     * default partition or partitioning hasn't been run yet.
+     * 
+ * + * optional string device = 2; + */ + java.lang.String getDevice(); + /** + *
+     * The device of the node. Can be empty if the node is mapped to the
+     * default partition or partitioning hasn't been run yet.
+     * 
+ * + * optional string device = 2; + */ + com.google.protobuf.ByteString + getDeviceBytes(); + + /** + *
+     * The id of the node. Node ids are only unique inside a partition.
+     * 
+ * + * optional int32 id = 3; + */ + int getId(); + + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + java.util.List + getInputInfoList(); + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + org.tensorflow.framework.CostGraphDef.Node.InputInfo getInputInfo(int index); + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + int getInputInfoCount(); + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + java.util.List + getInputInfoOrBuilderList(); + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder getInputInfoOrBuilder( + int index); + + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + java.util.List + getOutputInfoList(); + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + org.tensorflow.framework.CostGraphDef.Node.OutputInfo getOutputInfo(int index); + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + int getOutputInfoCount(); + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + java.util.List + getOutputInfoOrBuilderList(); + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder getOutputInfoOrBuilder( + int index); + + /** + *
+     * Temporary memory used by this node.
+     * 
+ * + * optional int64 temporary_memory_size = 6; + */ + long getTemporaryMemorySize(); + + /** + *
+     * Estimate of the computational cost of this node.
+     * 
+ * + * optional int64 compute_cost = 9; + */ + long getComputeCost(); + + /** + *
+     * If true, the output is permanent: it can't be discarded, because this
+     * node is part of the "final output". Nodes may depend on final nodes.
+     * 
+ * + * optional bool is_final = 7; + */ + boolean getIsFinal(); + + /** + *
+     * Ids of the control inputs for this node.
+     * 
+ * + * repeated int32 control_input = 8; + */ + java.util.List getControlInputList(); + /** + *
+     * Ids of the control inputs for this node.
+     * 
+ * + * repeated int32 control_input = 8; + */ + int getControlInputCount(); + /** + *
+     * Ids of the control inputs for this node.
+     * 
+ * + * repeated int32 control_input = 8; + */ + int getControlInput(int index); + } + /** + * Protobuf type {@code tensorflow.CostGraphDef.Node} + */ + public static final class Node extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.CostGraphDef.Node) + NodeOrBuilder { + // Use Node.newBuilder() to construct. + private Node(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Node() { + name_ = ""; + device_ = ""; + id_ = 0; + inputInfo_ = java.util.Collections.emptyList(); + outputInfo_ = java.util.Collections.emptyList(); + temporaryMemorySize_ = 0L; + computeCost_ = 0L; + isFinal_ = false; + controlInput_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Node( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + device_ = s; + break; + } + case 24: { + + id_ = input.readInt32(); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + inputInfo_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + inputInfo_.add( + input.readMessage(org.tensorflow.framework.CostGraphDef.Node.InputInfo.parser(), extensionRegistry)); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + outputInfo_ = new 
java.util.ArrayList(); + mutable_bitField0_ |= 0x00000010; + } + outputInfo_.add( + input.readMessage(org.tensorflow.framework.CostGraphDef.Node.OutputInfo.parser(), extensionRegistry)); + break; + } + case 48: { + + temporaryMemorySize_ = input.readInt64(); + break; + } + case 56: { + + isFinal_ = input.readBool(); + break; + } + case 64: { + if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { + controlInput_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000100; + } + controlInput_.add(input.readInt32()); + break; + } + case 66: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000100) == 0x00000100) && input.getBytesUntilLimit() > 0) { + controlInput_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000100; + } + while (input.getBytesUntilLimit() > 0) { + controlInput_.add(input.readInt32()); + } + input.popLimit(limit); + break; + } + case 72: { + + computeCost_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + inputInfo_ = java.util.Collections.unmodifiableList(inputInfo_); + } + if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + outputInfo_ = java.util.Collections.unmodifiableList(outputInfo_); + } + if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { + controlInput_ = java.util.Collections.unmodifiableList(controlInput_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() 
{ + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.Node.class, org.tensorflow.framework.CostGraphDef.Node.Builder.class); + } + + public interface InputInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.CostGraphDef.Node.InputInfo) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int32 preceding_node = 1; + */ + int getPrecedingNode(); + + /** + * optional int32 preceding_port = 2; + */ + int getPrecedingPort(); + } + /** + *
+     * Inputs of this node. They must be executed before this node can be
+     * executed. An input is a particular output of another node, specified
+     * by the node id and the output index.
+     * 
+ * + * Protobuf type {@code tensorflow.CostGraphDef.Node.InputInfo} + */ + public static final class InputInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.CostGraphDef.Node.InputInfo) + InputInfoOrBuilder { + // Use InputInfo.newBuilder() to construct. + private InputInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InputInfo() { + precedingNode_ = 0; + precedingPort_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private InputInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + precedingNode_ = input.readInt32(); + break; + } + case 16: { + + precedingPort_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_InputInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_InputInfo_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.Node.InputInfo.class, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder.class); + } + + public static final int PRECEDING_NODE_FIELD_NUMBER = 1; + private int precedingNode_; + /** + * optional int32 preceding_node = 1; + */ + public int getPrecedingNode() { + return precedingNode_; + } + + public static final int PRECEDING_PORT_FIELD_NUMBER = 2; + private int precedingPort_; + /** + * optional int32 preceding_port = 2; + */ + public int getPrecedingPort() { + return precedingPort_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (precedingNode_ != 0) { + output.writeInt32(1, precedingNode_); + } + if (precedingPort_ != 0) { + output.writeInt32(2, precedingPort_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (precedingNode_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, precedingNode_); + } + if (precedingPort_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, precedingPort_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.CostGraphDef.Node.InputInfo)) { + return super.equals(obj); + } + org.tensorflow.framework.CostGraphDef.Node.InputInfo other = (org.tensorflow.framework.CostGraphDef.Node.InputInfo) obj; + + boolean result = true; + result = result && (getPrecedingNode() + == other.getPrecedingNode()); + result = 
result && (getPrecedingPort() + == other.getPrecedingPort()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + PRECEDING_NODE_FIELD_NUMBER; + hash = (53 * hash) + getPrecedingNode(); + hash = (37 * hash) + PRECEDING_PORT_FIELD_NUMBER; + hash = (53 * hash) + getPrecedingPort(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.CostGraphDef.Node.InputInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+       * Inputs of this node. They must be executed before this node can be
+       * executed. An input is a particular output of another node, specified
+       * by the node id and the output index.
+       * 
+ * + * Protobuf type {@code tensorflow.CostGraphDef.Node.InputInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.CostGraphDef.Node.InputInfo) + org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_InputInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_InputInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.Node.InputInfo.class, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder.class); + } + + // Construct using org.tensorflow.framework.CostGraphDef.Node.InputInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + precedingNode_ = 0; + + precedingPort_ = 0; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_InputInfo_descriptor; + } + + public org.tensorflow.framework.CostGraphDef.Node.InputInfo getDefaultInstanceForType() { + return org.tensorflow.framework.CostGraphDef.Node.InputInfo.getDefaultInstance(); + } + + public org.tensorflow.framework.CostGraphDef.Node.InputInfo build() { + org.tensorflow.framework.CostGraphDef.Node.InputInfo 
result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.CostGraphDef.Node.InputInfo buildPartial() { + org.tensorflow.framework.CostGraphDef.Node.InputInfo result = new org.tensorflow.framework.CostGraphDef.Node.InputInfo(this); + result.precedingNode_ = precedingNode_; + result.precedingPort_ = precedingPort_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.CostGraphDef.Node.InputInfo) { + return mergeFrom((org.tensorflow.framework.CostGraphDef.Node.InputInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.CostGraphDef.Node.InputInfo other) { + if (other == org.tensorflow.framework.CostGraphDef.Node.InputInfo.getDefaultInstance()) return this; + if (other.getPrecedingNode() != 0) { + setPrecedingNode(other.getPrecedingNode()); + } + if (other.getPrecedingPort() != 0) { + setPrecedingPort(other.getPrecedingPort()); + } + onChanged(); + return this; 
+ } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.CostGraphDef.Node.InputInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.CostGraphDef.Node.InputInfo) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int precedingNode_ ; + /** + * optional int32 preceding_node = 1; + */ + public int getPrecedingNode() { + return precedingNode_; + } + /** + * optional int32 preceding_node = 1; + */ + public Builder setPrecedingNode(int value) { + + precedingNode_ = value; + onChanged(); + return this; + } + /** + * optional int32 preceding_node = 1; + */ + public Builder clearPrecedingNode() { + + precedingNode_ = 0; + onChanged(); + return this; + } + + private int precedingPort_ ; + /** + * optional int32 preceding_port = 2; + */ + public int getPrecedingPort() { + return precedingPort_; + } + /** + * optional int32 preceding_port = 2; + */ + public Builder setPrecedingPort(int value) { + + precedingPort_ = value; + onChanged(); + return this; + } + /** + * optional int32 preceding_port = 2; + */ + public Builder clearPrecedingPort() { + + precedingPort_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.CostGraphDef.Node.InputInfo) + } + + // 
@@protoc_insertion_point(class_scope:tensorflow.CostGraphDef.Node.InputInfo) + private static final org.tensorflow.framework.CostGraphDef.Node.InputInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.CostGraphDef.Node.InputInfo(); + } + + public static org.tensorflow.framework.CostGraphDef.Node.InputInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public InputInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new InputInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.CostGraphDef.Node.InputInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface OutputInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.CostGraphDef.Node.OutputInfo) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int64 size = 1; + */ + long getSize(); + + /** + *
+       * If >= 0, the output is an alias of an input. Note that an alias input
+       * may itself be an alias. The algorithm will therefore need to follow
+       * those pointers.
+       * 
+ * + * optional int64 alias_input_port = 2; + */ + long getAliasInputPort(); + + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + boolean hasShape(); + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + org.tensorflow.framework.TensorShapeProto getShape(); + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder(); + + /** + * optional .tensorflow.DataType dtype = 4; + */ + int getDtypeValue(); + /** + * optional .tensorflow.DataType dtype = 4; + */ + org.tensorflow.framework.DataType getDtype(); + } + /** + *
+     * Outputs of this node.
+     * 
+ * + * Protobuf type {@code tensorflow.CostGraphDef.Node.OutputInfo} + */ + public static final class OutputInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.CostGraphDef.Node.OutputInfo) + OutputInfoOrBuilder { + // Use OutputInfo.newBuilder() to construct. + private OutputInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OutputInfo() { + size_ = 0L; + aliasInputPort_ = 0L; + dtype_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private OutputInfo( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + size_ = input.readInt64(); + break; + } + case 16: { + + aliasInputPort_ = input.readInt64(); + break; + } + case 26: { + org.tensorflow.framework.TensorShapeProto.Builder subBuilder = null; + if (shape_ != null) { + subBuilder = shape_.toBuilder(); + } + shape_ = input.readMessage(org.tensorflow.framework.TensorShapeProto.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(shape_); + shape_ = subBuilder.buildPartial(); + } + + break; + } + case 32: { + int rawValue = input.readEnum(); + + dtype_ = rawValue; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + 
public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_OutputInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_OutputInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.Node.OutputInfo.class, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder.class); + } + + public static final int SIZE_FIELD_NUMBER = 1; + private long size_; + /** + * optional int64 size = 1; + */ + public long getSize() { + return size_; + } + + public static final int ALIAS_INPUT_PORT_FIELD_NUMBER = 2; + private long aliasInputPort_; + /** + *
+       * If >= 0, the output is an alias of an input. Note that an alias input
+       * may itself be an alias. The algorithm will therefore need to follow
+       * those pointers.
+       * 
+ * + * optional int64 alias_input_port = 2; + */ + public long getAliasInputPort() { + return aliasInputPort_; + } + + public static final int SHAPE_FIELD_NUMBER = 3; + private org.tensorflow.framework.TensorShapeProto shape_; + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public boolean hasShape() { + return shape_ != null; + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public org.tensorflow.framework.TensorShapeProto getShape() { + return shape_ == null ? org.tensorflow.framework.TensorShapeProto.getDefaultInstance() : shape_; + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() { + return getShape(); + } + + public static final int DTYPE_FIELD_NUMBER = 4; + private int dtype_; + /** + * optional .tensorflow.DataType dtype = 4; + */ + public int getDtypeValue() { + return dtype_; + } + /** + * optional .tensorflow.DataType dtype = 4; + */ + public org.tensorflow.framework.DataType getDtype() { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(dtype_); + return result == null ? 
org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (size_ != 0L) { + output.writeInt64(1, size_); + } + if (aliasInputPort_ != 0L) { + output.writeInt64(2, aliasInputPort_); + } + if (shape_ != null) { + output.writeMessage(3, getShape()); + } + if (dtype_ != org.tensorflow.framework.DataType.DT_INVALID.getNumber()) { + output.writeEnum(4, dtype_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (size_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, size_); + } + if (aliasInputPort_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, aliasInputPort_); + } + if (shape_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getShape()); + } + if (dtype_ != org.tensorflow.framework.DataType.DT_INVALID.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, dtype_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.CostGraphDef.Node.OutputInfo)) { + return super.equals(obj); + } + org.tensorflow.framework.CostGraphDef.Node.OutputInfo other = (org.tensorflow.framework.CostGraphDef.Node.OutputInfo) obj; + + boolean result = true; + result = result && (getSize() + == other.getSize()); + result = result && (getAliasInputPort() + == other.getAliasInputPort()); + result = result && (hasShape() == 
other.hasShape()); + if (hasShape()) { + result = result && getShape() + .equals(other.getShape()); + } + result = result && dtype_ == other.dtype_; + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + SIZE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getSize()); + hash = (37 * hash) + ALIAS_INPUT_PORT_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAliasInputPort()); + if (hasShape()) { + hash = (37 * hash) + SHAPE_FIELD_NUMBER; + hash = (53 * hash) + getShape().hashCode(); + } + hash = (37 * hash) + DTYPE_FIELD_NUMBER; + hash = (53 * hash) + dtype_; + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom(java.io.InputStream input) + 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.CostGraphDef.Node.OutputInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+       * Outputs of this node.
+       * 
+ * + * Protobuf type {@code tensorflow.CostGraphDef.Node.OutputInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.CostGraphDef.Node.OutputInfo) + org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_OutputInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_OutputInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.Node.OutputInfo.class, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder.class); + } + + // Construct using org.tensorflow.framework.CostGraphDef.Node.OutputInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + size_ = 0L; + + aliasInputPort_ = 0L; + + if (shapeBuilder_ == null) { + shape_ = null; + } else { + shape_ = null; + shapeBuilder_ = null; + } + dtype_ = 0; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_OutputInfo_descriptor; + } + + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo getDefaultInstanceForType() { + return org.tensorflow.framework.CostGraphDef.Node.OutputInfo.getDefaultInstance(); + } + + 
public org.tensorflow.framework.CostGraphDef.Node.OutputInfo build() { + org.tensorflow.framework.CostGraphDef.Node.OutputInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo buildPartial() { + org.tensorflow.framework.CostGraphDef.Node.OutputInfo result = new org.tensorflow.framework.CostGraphDef.Node.OutputInfo(this); + result.size_ = size_; + result.aliasInputPort_ = aliasInputPort_; + if (shapeBuilder_ == null) { + result.shape_ = shape_; + } else { + result.shape_ = shapeBuilder_.build(); + } + result.dtype_ = dtype_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.CostGraphDef.Node.OutputInfo) { + return mergeFrom((org.tensorflow.framework.CostGraphDef.Node.OutputInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.CostGraphDef.Node.OutputInfo other) { + if (other == 
org.tensorflow.framework.CostGraphDef.Node.OutputInfo.getDefaultInstance()) return this; + if (other.getSize() != 0L) { + setSize(other.getSize()); + } + if (other.getAliasInputPort() != 0L) { + setAliasInputPort(other.getAliasInputPort()); + } + if (other.hasShape()) { + mergeShape(other.getShape()); + } + if (other.dtype_ != 0) { + setDtypeValue(other.getDtypeValue()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.CostGraphDef.Node.OutputInfo parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.CostGraphDef.Node.OutputInfo) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long size_ ; + /** + * optional int64 size = 1; + */ + public long getSize() { + return size_; + } + /** + * optional int64 size = 1; + */ + public Builder setSize(long value) { + + size_ = value; + onChanged(); + return this; + } + /** + * optional int64 size = 1; + */ + public Builder clearSize() { + + size_ = 0L; + onChanged(); + return this; + } + + private long aliasInputPort_ ; + /** + *
+         * If >= 0, the output is an alias of an input. Note that an alias input
+         * may itself be an alias. The algorithm will therefore need to follow
+         * those pointers.
+         * 
+ * + * optional int64 alias_input_port = 2; + */ + public long getAliasInputPort() { + return aliasInputPort_; + } + /** + *
+         * If >= 0, the output is an alias of an input. Note that an alias input
+         * may itself be an alias. The algorithm will therefore need to follow
+         * those pointers.
+         * 
+ * + * optional int64 alias_input_port = 2; + */ + public Builder setAliasInputPort(long value) { + + aliasInputPort_ = value; + onChanged(); + return this; + } + /** + *
+         * If >= 0, the output is an alias of an input. Note that an alias input
+         * may itself be an alias. The algorithm will therefore need to follow
+         * those pointers.
+         * 
+ * + * optional int64 alias_input_port = 2; + */ + public Builder clearAliasInputPort() { + + aliasInputPort_ = 0L; + onChanged(); + return this; + } + + private org.tensorflow.framework.TensorShapeProto shape_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> shapeBuilder_; + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public boolean hasShape() { + return shapeBuilder_ != null || shape_ != null; + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public org.tensorflow.framework.TensorShapeProto getShape() { + if (shapeBuilder_ == null) { + return shape_ == null ? org.tensorflow.framework.TensorShapeProto.getDefaultInstance() : shape_; + } else { + return shapeBuilder_.getMessage(); + } + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public Builder setShape(org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + shape_ = value; + onChanged(); + } else { + shapeBuilder_.setMessage(value); + } + + return this; + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public Builder setShape( + org.tensorflow.framework.TensorShapeProto.Builder builderForValue) { + if (shapeBuilder_ == null) { + shape_ = builderForValue.build(); + onChanged(); + } else { + shapeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public Builder mergeShape(org.tensorflow.framework.TensorShapeProto value) { + if (shapeBuilder_ == null) { + if (shape_ != null) { + shape_ = + org.tensorflow.framework.TensorShapeProto.newBuilder(shape_).mergeFrom(value).buildPartial(); + } else { + shape_ = value; + } + onChanged(); + } else { + shapeBuilder_.mergeFrom(value); + } + + return this; + } + /** + 
* optional .tensorflow.TensorShapeProto shape = 3; + */ + public Builder clearShape() { + if (shapeBuilder_ == null) { + shape_ = null; + onChanged(); + } else { + shape_ = null; + shapeBuilder_ = null; + } + + return this; + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public org.tensorflow.framework.TensorShapeProto.Builder getShapeBuilder() { + + onChanged(); + return getShapeFieldBuilder().getBuilder(); + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() { + if (shapeBuilder_ != null) { + return shapeBuilder_.getMessageOrBuilder(); + } else { + return shape_ == null ? + org.tensorflow.framework.TensorShapeProto.getDefaultInstance() : shape_; + } + } + /** + * optional .tensorflow.TensorShapeProto shape = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> + getShapeFieldBuilder() { + if (shapeBuilder_ == null) { + shapeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder>( + getShape(), + getParentForChildren(), + isClean()); + shape_ = null; + } + return shapeBuilder_; + } + + private int dtype_ = 0; + /** + * optional .tensorflow.DataType dtype = 4; + */ + public int getDtypeValue() { + return dtype_; + } + /** + * optional .tensorflow.DataType dtype = 4; + */ + public Builder setDtypeValue(int value) { + dtype_ = value; + onChanged(); + return this; + } + /** + * optional .tensorflow.DataType dtype = 4; + */ + public org.tensorflow.framework.DataType getDtype() { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(dtype_); + return result == null ? 
org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + /** + * optional .tensorflow.DataType dtype = 4; + */ + public Builder setDtype(org.tensorflow.framework.DataType value) { + if (value == null) { + throw new NullPointerException(); + } + + dtype_ = value.getNumber(); + onChanged(); + return this; + } + /** + * optional .tensorflow.DataType dtype = 4; + */ + public Builder clearDtype() { + + dtype_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.CostGraphDef.Node.OutputInfo) + } + + // @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef.Node.OutputInfo) + private static final org.tensorflow.framework.CostGraphDef.Node.OutputInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.CostGraphDef.Node.OutputInfo(); + } + + public static org.tensorflow.framework.CostGraphDef.Node.OutputInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OutputInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OutputInfo(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object 
name_; + /** + *
+     * The name of the node. Names are globally unique.
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * The name of the node. Names are globally unique.
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DEVICE_FIELD_NUMBER = 2; + private volatile java.lang.Object device_; + /** + *
+     * The device of the node. Can be empty if the node is mapped to the
+     * default partition or partitioning hasn't been run yet.
+     * 
+ * + * optional string device = 2; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } + } + /** + *
+     * The device of the node. Can be empty if the node is mapped to the
+     * default partition or partitioning hasn't been run yet.
+     * 
+ * + * optional string device = 2; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ID_FIELD_NUMBER = 3; + private int id_; + /** + *
+     * The id of the node. Node ids are only unique inside a partition.
+     * 
+ * + * optional int32 id = 3; + */ + public int getId() { + return id_; + } + + public static final int INPUT_INFO_FIELD_NUMBER = 4; + private java.util.List inputInfo_; + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public java.util.List getInputInfoList() { + return inputInfo_; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public java.util.List + getInputInfoOrBuilderList() { + return inputInfo_; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public int getInputInfoCount() { + return inputInfo_.size(); + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfo getInputInfo(int index) { + return inputInfo_.get(index); + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder getInputInfoOrBuilder( + int index) { + return inputInfo_.get(index); + } + + public static final int OUTPUT_INFO_FIELD_NUMBER = 5; + private java.util.List outputInfo_; + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public java.util.List getOutputInfoList() { + return outputInfo_; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public java.util.List + getOutputInfoOrBuilderList() { + return outputInfo_; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public int getOutputInfoCount() { + return outputInfo_.size(); + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo getOutputInfo(int index) { + return outputInfo_.get(index); + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder 
getOutputInfoOrBuilder( + int index) { + return outputInfo_.get(index); + } + + public static final int TEMPORARY_MEMORY_SIZE_FIELD_NUMBER = 6; + private long temporaryMemorySize_; + /** + *
+     * Temporary memory used by this node.
+     * 
+ * + * optional int64 temporary_memory_size = 6; + */ + public long getTemporaryMemorySize() { + return temporaryMemorySize_; + } + + public static final int COMPUTE_COST_FIELD_NUMBER = 9; + private long computeCost_; + /** + *
+     * Estimate of the computational cost of this node.
+     * 
+ * + * optional int64 compute_cost = 9; + */ + public long getComputeCost() { + return computeCost_; + } + + public static final int IS_FINAL_FIELD_NUMBER = 7; + private boolean isFinal_; + /** + *
+     * If true, the output is permanent: it can't be discarded, because this
+     * node is part of the "final output". Nodes may depend on final nodes.
+     * 
+ * + * optional bool is_final = 7; + */ + public boolean getIsFinal() { + return isFinal_; + } + + public static final int CONTROL_INPUT_FIELD_NUMBER = 8; + private java.util.List controlInput_; + /** + *
+     * Ids of the control inputs for this node.
+     * 
+ * + * repeated int32 control_input = 8; + */ + public java.util.List + getControlInputList() { + return controlInput_; + } + /** + *
+     * Ids of the control inputs for this node.
+     * 
+ * + * repeated int32 control_input = 8; + */ + public int getControlInputCount() { + return controlInput_.size(); + } + /** + *
+     * Ids of the control inputs for this node.
+     * 
+ * + * repeated int32 control_input = 8; + */ + public int getControlInput(int index) { + return controlInput_.get(index); + } + private int controlInputMemoizedSerializedSize = -1; + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getDeviceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, device_); + } + if (id_ != 0) { + output.writeInt32(3, id_); + } + for (int i = 0; i < inputInfo_.size(); i++) { + output.writeMessage(4, inputInfo_.get(i)); + } + for (int i = 0; i < outputInfo_.size(); i++) { + output.writeMessage(5, outputInfo_.get(i)); + } + if (temporaryMemorySize_ != 0L) { + output.writeInt64(6, temporaryMemorySize_); + } + if (isFinal_ != false) { + output.writeBool(7, isFinal_); + } + if (getControlInputList().size() > 0) { + output.writeUInt32NoTag(66); + output.writeUInt32NoTag(controlInputMemoizedSerializedSize); + } + for (int i = 0; i < controlInput_.size(); i++) { + output.writeInt32NoTag(controlInput_.get(i)); + } + if (computeCost_ != 0L) { + output.writeInt64(9, computeCost_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (!getDeviceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, device_); + } + if (id_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, id_); + } + for (int i = 0; i < inputInfo_.size(); i++) 
{ + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, inputInfo_.get(i)); + } + for (int i = 0; i < outputInfo_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, outputInfo_.get(i)); + } + if (temporaryMemorySize_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(6, temporaryMemorySize_); + } + if (isFinal_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, isFinal_); + } + { + int dataSize = 0; + for (int i = 0; i < controlInput_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(controlInput_.get(i)); + } + size += dataSize; + if (!getControlInputList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + controlInputMemoizedSerializedSize = dataSize; + } + if (computeCost_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(9, computeCost_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.CostGraphDef.Node)) { + return super.equals(obj); + } + org.tensorflow.framework.CostGraphDef.Node other = (org.tensorflow.framework.CostGraphDef.Node) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getDevice() + .equals(other.getDevice()); + result = result && (getId() + == other.getId()); + result = result && getInputInfoList() + .equals(other.getInputInfoList()); + result = result && getOutputInfoList() + .equals(other.getOutputInfoList()); + result = result && (getTemporaryMemorySize() + == other.getTemporaryMemorySize()); + result = result && (getComputeCost() + == other.getComputeCost()); + result = result && (getIsFinal() + == other.getIsFinal()); + 
result = result && getControlInputList() + .equals(other.getControlInputList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + DEVICE_FIELD_NUMBER; + hash = (53 * hash) + getDevice().hashCode(); + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId(); + if (getInputInfoCount() > 0) { + hash = (37 * hash) + INPUT_INFO_FIELD_NUMBER; + hash = (53 * hash) + getInputInfoList().hashCode(); + } + if (getOutputInfoCount() > 0) { + hash = (37 * hash) + OUTPUT_INFO_FIELD_NUMBER; + hash = (53 * hash) + getOutputInfoList().hashCode(); + } + hash = (37 * hash) + TEMPORARY_MEMORY_SIZE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTemporaryMemorySize()); + hash = (37 * hash) + COMPUTE_COST_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getComputeCost()); + hash = (37 * hash) + IS_FINAL_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsFinal()); + if (getControlInputCount() > 0) { + hash = (37 * hash) + CONTROL_INPUT_FIELD_NUMBER; + hash = (53 * hash) + getControlInputList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.CostGraphDef.Node parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef.Node parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.tensorflow.framework.CostGraphDef.Node parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef.Node parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef.Node parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef.Node parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.CostGraphDef.Node prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.CostGraphDef.Node} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.CostGraphDef.Node) + org.tensorflow.framework.CostGraphDef.NodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.Node.class, org.tensorflow.framework.CostGraphDef.Node.Builder.class); + } + + // Construct using org.tensorflow.framework.CostGraphDef.Node.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { 
+ getInputInfoFieldBuilder(); + getOutputInfoFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + device_ = ""; + + id_ = 0; + + if (inputInfoBuilder_ == null) { + inputInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + inputInfoBuilder_.clear(); + } + if (outputInfoBuilder_ == null) { + outputInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + outputInfoBuilder_.clear(); + } + temporaryMemorySize_ = 0L; + + computeCost_ = 0L; + + isFinal_ = false; + + controlInput_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000100); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_Node_descriptor; + } + + public org.tensorflow.framework.CostGraphDef.Node getDefaultInstanceForType() { + return org.tensorflow.framework.CostGraphDef.Node.getDefaultInstance(); + } + + public org.tensorflow.framework.CostGraphDef.Node build() { + org.tensorflow.framework.CostGraphDef.Node result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.CostGraphDef.Node buildPartial() { + org.tensorflow.framework.CostGraphDef.Node result = new org.tensorflow.framework.CostGraphDef.Node(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + result.device_ = device_; + result.id_ = id_; + if (inputInfoBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + inputInfo_ = java.util.Collections.unmodifiableList(inputInfo_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.inputInfo_ = inputInfo_; + } else { + result.inputInfo_ = inputInfoBuilder_.build(); + } + if (outputInfoBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + 
outputInfo_ = java.util.Collections.unmodifiableList(outputInfo_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.outputInfo_ = outputInfo_; + } else { + result.outputInfo_ = outputInfoBuilder_.build(); + } + result.temporaryMemorySize_ = temporaryMemorySize_; + result.computeCost_ = computeCost_; + result.isFinal_ = isFinal_; + if (((bitField0_ & 0x00000100) == 0x00000100)) { + controlInput_ = java.util.Collections.unmodifiableList(controlInput_); + bitField0_ = (bitField0_ & ~0x00000100); + } + result.controlInput_ = controlInput_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.CostGraphDef.Node) { + return mergeFrom((org.tensorflow.framework.CostGraphDef.Node)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.CostGraphDef.Node other) { + if (other == org.tensorflow.framework.CostGraphDef.Node.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if 
(!other.getDevice().isEmpty()) { + device_ = other.device_; + onChanged(); + } + if (other.getId() != 0) { + setId(other.getId()); + } + if (inputInfoBuilder_ == null) { + if (!other.inputInfo_.isEmpty()) { + if (inputInfo_.isEmpty()) { + inputInfo_ = other.inputInfo_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureInputInfoIsMutable(); + inputInfo_.addAll(other.inputInfo_); + } + onChanged(); + } + } else { + if (!other.inputInfo_.isEmpty()) { + if (inputInfoBuilder_.isEmpty()) { + inputInfoBuilder_.dispose(); + inputInfoBuilder_ = null; + inputInfo_ = other.inputInfo_; + bitField0_ = (bitField0_ & ~0x00000008); + inputInfoBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getInputInfoFieldBuilder() : null; + } else { + inputInfoBuilder_.addAllMessages(other.inputInfo_); + } + } + } + if (outputInfoBuilder_ == null) { + if (!other.outputInfo_.isEmpty()) { + if (outputInfo_.isEmpty()) { + outputInfo_ = other.outputInfo_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureOutputInfoIsMutable(); + outputInfo_.addAll(other.outputInfo_); + } + onChanged(); + } + } else { + if (!other.outputInfo_.isEmpty()) { + if (outputInfoBuilder_.isEmpty()) { + outputInfoBuilder_.dispose(); + outputInfoBuilder_ = null; + outputInfo_ = other.outputInfo_; + bitField0_ = (bitField0_ & ~0x00000010); + outputInfoBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getOutputInfoFieldBuilder() : null; + } else { + outputInfoBuilder_.addAllMessages(other.outputInfo_); + } + } + } + if (other.getTemporaryMemorySize() != 0L) { + setTemporaryMemorySize(other.getTemporaryMemorySize()); + } + if (other.getComputeCost() != 0L) { + setComputeCost(other.getComputeCost()); + } + if (other.getIsFinal() != false) { + setIsFinal(other.getIsFinal()); + } + if (!other.controlInput_.isEmpty()) { + if (controlInput_.isEmpty()) { + controlInput_ = other.controlInput_; + bitField0_ = (bitField0_ & ~0x00000100); + } else { + ensureControlInputIsMutable(); + controlInput_.addAll(other.controlInput_); + } + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.CostGraphDef.Node parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.CostGraphDef.Node) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+       * The name of the node. Names are globally unique.
+       * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The name of the node. Names are globally unique.
+       * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The name of the node. Names are globally unique.
+       * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+       * The name of the node. Names are globally unique.
+       * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+       * The name of the node. Names are globally unique.
+       * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object device_ = ""; + /** + *
+       * The device of the node. Can be empty if the node is mapped to the
+       * default partition or partitioning hasn't been run yet.
+       * 
+ * + * optional string device = 2; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The device of the node. Can be empty if the node is mapped to the
+       * default partition or partitioning hasn't been run yet.
+       * 
+ * + * optional string device = 2; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The device of the node. Can be empty if the node is mapped to the
+       * default partition or partitioning hasn't been run yet.
+       * 
+ * + * optional string device = 2; + */ + public Builder setDevice( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + device_ = value; + onChanged(); + return this; + } + /** + *
+       * The device of the node. Can be empty if the node is mapped to the
+       * default partition or partitioning hasn't been run yet.
+       * 
+ * + * optional string device = 2; + */ + public Builder clearDevice() { + + device_ = getDefaultInstance().getDevice(); + onChanged(); + return this; + } + /** + *
+       * The device of the node. Can be empty if the node is mapped to the
+       * default partition or partitioning hasn't been run yet.
+       * 
+ * + * optional string device = 2; + */ + public Builder setDeviceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + device_ = value; + onChanged(); + return this; + } + + private int id_ ; + /** + *
+       * The id of the node. Node ids are only unique inside a partition.
+       * 
+ * + * optional int32 id = 3; + */ + public int getId() { + return id_; + } + /** + *
+       * The id of the node. Node ids are only unique inside a partition.
+       * 
+ * + * optional int32 id = 3; + */ + public Builder setId(int value) { + + id_ = value; + onChanged(); + return this; + } + /** + *
+       * The id of the node. Node ids are only unique inside a partition.
+       * 
+ * + * optional int32 id = 3; + */ + public Builder clearId() { + + id_ = 0; + onChanged(); + return this; + } + + private java.util.List inputInfo_ = + java.util.Collections.emptyList(); + private void ensureInputInfoIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + inputInfo_ = new java.util.ArrayList(inputInfo_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node.InputInfo, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder, org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder> inputInfoBuilder_; + + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public java.util.List getInputInfoList() { + if (inputInfoBuilder_ == null) { + return java.util.Collections.unmodifiableList(inputInfo_); + } else { + return inputInfoBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public int getInputInfoCount() { + if (inputInfoBuilder_ == null) { + return inputInfo_.size(); + } else { + return inputInfoBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfo getInputInfo(int index) { + if (inputInfoBuilder_ == null) { + return inputInfo_.get(index); + } else { + return inputInfoBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder setInputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.InputInfo value) { + if (inputInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputInfoIsMutable(); + inputInfo_.set(index, value); + onChanged(); + } else { + inputInfoBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder 
setInputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder builderForValue) { + if (inputInfoBuilder_ == null) { + ensureInputInfoIsMutable(); + inputInfo_.set(index, builderForValue.build()); + onChanged(); + } else { + inputInfoBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder addInputInfo(org.tensorflow.framework.CostGraphDef.Node.InputInfo value) { + if (inputInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputInfoIsMutable(); + inputInfo_.add(value); + onChanged(); + } else { + inputInfoBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder addInputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.InputInfo value) { + if (inputInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputInfoIsMutable(); + inputInfo_.add(index, value); + onChanged(); + } else { + inputInfoBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder addInputInfo( + org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder builderForValue) { + if (inputInfoBuilder_ == null) { + ensureInputInfoIsMutable(); + inputInfo_.add(builderForValue.build()); + onChanged(); + } else { + inputInfoBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder addInputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder builderForValue) { + if (inputInfoBuilder_ == null) { + ensureInputInfoIsMutable(); + inputInfo_.add(index, builderForValue.build()); + onChanged(); + } else { + inputInfoBuilder_.addMessage(index, 
builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder addAllInputInfo( + java.lang.Iterable values) { + if (inputInfoBuilder_ == null) { + ensureInputInfoIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, inputInfo_); + onChanged(); + } else { + inputInfoBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder clearInputInfo() { + if (inputInfoBuilder_ == null) { + inputInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + inputInfoBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public Builder removeInputInfo(int index) { + if (inputInfoBuilder_ == null) { + ensureInputInfoIsMutable(); + inputInfo_.remove(index); + onChanged(); + } else { + inputInfoBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder getInputInfoBuilder( + int index) { + return getInputInfoFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder getInputInfoOrBuilder( + int index) { + if (inputInfoBuilder_ == null) { + return inputInfo_.get(index); } else { + return inputInfoBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public java.util.List + getInputInfoOrBuilderList() { + if (inputInfoBuilder_ != null) { + return inputInfoBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(inputInfo_); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo 
input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder addInputInfoBuilder() { + return getInputInfoFieldBuilder().addBuilder( + org.tensorflow.framework.CostGraphDef.Node.InputInfo.getDefaultInstance()); + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder addInputInfoBuilder( + int index) { + return getInputInfoFieldBuilder().addBuilder( + index, org.tensorflow.framework.CostGraphDef.Node.InputInfo.getDefaultInstance()); + } + /** + * repeated .tensorflow.CostGraphDef.Node.InputInfo input_info = 4; + */ + public java.util.List + getInputInfoBuilderList() { + return getInputInfoFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node.InputInfo, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder, org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder> + getInputInfoFieldBuilder() { + if (inputInfoBuilder_ == null) { + inputInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node.InputInfo, org.tensorflow.framework.CostGraphDef.Node.InputInfo.Builder, org.tensorflow.framework.CostGraphDef.Node.InputInfoOrBuilder>( + inputInfo_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + inputInfo_ = null; + } + return inputInfoBuilder_; + } + + private java.util.List outputInfo_ = + java.util.Collections.emptyList(); + private void ensureOutputInfoIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + outputInfo_ = new java.util.ArrayList(outputInfo_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node.OutputInfo, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder, org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder> outputInfoBuilder_; + + 
/** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public java.util.List getOutputInfoList() { + if (outputInfoBuilder_ == null) { + return java.util.Collections.unmodifiableList(outputInfo_); + } else { + return outputInfoBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public int getOutputInfoCount() { + if (outputInfoBuilder_ == null) { + return outputInfo_.size(); + } else { + return outputInfoBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo getOutputInfo(int index) { + if (outputInfoBuilder_ == null) { + return outputInfo_.get(index); + } else { + return outputInfoBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder setOutputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.OutputInfo value) { + if (outputInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputInfoIsMutable(); + outputInfo_.set(index, value); + onChanged(); + } else { + outputInfoBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder setOutputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder builderForValue) { + if (outputInfoBuilder_ == null) { + ensureOutputInfoIsMutable(); + outputInfo_.set(index, builderForValue.build()); + onChanged(); + } else { + outputInfoBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder addOutputInfo(org.tensorflow.framework.CostGraphDef.Node.OutputInfo value) { + if (outputInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + 
} + ensureOutputInfoIsMutable(); + outputInfo_.add(value); + onChanged(); + } else { + outputInfoBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder addOutputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.OutputInfo value) { + if (outputInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputInfoIsMutable(); + outputInfo_.add(index, value); + onChanged(); + } else { + outputInfoBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder addOutputInfo( + org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder builderForValue) { + if (outputInfoBuilder_ == null) { + ensureOutputInfoIsMutable(); + outputInfo_.add(builderForValue.build()); + onChanged(); + } else { + outputInfoBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder addOutputInfo( + int index, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder builderForValue) { + if (outputInfoBuilder_ == null) { + ensureOutputInfoIsMutable(); + outputInfo_.add(index, builderForValue.build()); + onChanged(); + } else { + outputInfoBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder addAllOutputInfo( + java.lang.Iterable values) { + if (outputInfoBuilder_ == null) { + ensureOutputInfoIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, outputInfo_); + onChanged(); + } else { + outputInfoBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder clearOutputInfo() { + if (outputInfoBuilder_ == null) { + 
outputInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + outputInfoBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public Builder removeOutputInfo(int index) { + if (outputInfoBuilder_ == null) { + ensureOutputInfoIsMutable(); + outputInfo_.remove(index); + onChanged(); + } else { + outputInfoBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder getOutputInfoBuilder( + int index) { + return getOutputInfoFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder getOutputInfoOrBuilder( + int index) { + if (outputInfoBuilder_ == null) { + return outputInfo_.get(index); } else { + return outputInfoBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public java.util.List + getOutputInfoOrBuilderList() { + if (outputInfoBuilder_ != null) { + return outputInfoBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(outputInfo_); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder addOutputInfoBuilder() { + return getOutputInfoFieldBuilder().addBuilder( + org.tensorflow.framework.CostGraphDef.Node.OutputInfo.getDefaultInstance()); + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder addOutputInfoBuilder( + int index) { + return getOutputInfoFieldBuilder().addBuilder( + index, 
org.tensorflow.framework.CostGraphDef.Node.OutputInfo.getDefaultInstance()); + } + /** + * repeated .tensorflow.CostGraphDef.Node.OutputInfo output_info = 5; + */ + public java.util.List + getOutputInfoBuilderList() { + return getOutputInfoFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node.OutputInfo, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder, org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder> + getOutputInfoFieldBuilder() { + if (outputInfoBuilder_ == null) { + outputInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node.OutputInfo, org.tensorflow.framework.CostGraphDef.Node.OutputInfo.Builder, org.tensorflow.framework.CostGraphDef.Node.OutputInfoOrBuilder>( + outputInfo_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + outputInfo_ = null; + } + return outputInfoBuilder_; + } + + private long temporaryMemorySize_ ; + /** + *
+       * Temporary memory used by this node.
+       * 
+ * + * optional int64 temporary_memory_size = 6; + */ + public long getTemporaryMemorySize() { + return temporaryMemorySize_; + } + /** + *
+       * Temporary memory used by this node.
+       * 
+ * + * optional int64 temporary_memory_size = 6; + */ + public Builder setTemporaryMemorySize(long value) { + + temporaryMemorySize_ = value; + onChanged(); + return this; + } + /** + *
+       * Temporary memory used by this node.
+       * 
+ * + * optional int64 temporary_memory_size = 6; + */ + public Builder clearTemporaryMemorySize() { + + temporaryMemorySize_ = 0L; + onChanged(); + return this; + } + + private long computeCost_ ; + /** + *
+       * Estimate of the computational cost of this node.
+       * 
+ * + * optional int64 compute_cost = 9; + */ + public long getComputeCost() { + return computeCost_; + } + /** + *
+       * Estimate of the computational cost of this node.
+       * 
+ * + * optional int64 compute_cost = 9; + */ + public Builder setComputeCost(long value) { + + computeCost_ = value; + onChanged(); + return this; + } + /** + *
+       * Estimate of the computational cost of this node.
+       * 
+ * + * optional int64 compute_cost = 9; + */ + public Builder clearComputeCost() { + + computeCost_ = 0L; + onChanged(); + return this; + } + + private boolean isFinal_ ; + /** + *
+       * If true, the output is permanent: it can't be discarded, because this
+       * node is part of the "final output". Nodes may depend on final nodes.
+       * 
+ * + * optional bool is_final = 7; + */ + public boolean getIsFinal() { + return isFinal_; + } + /** + *
+       * If true, the output is permanent: it can't be discarded, because this
+       * node is part of the "final output". Nodes may depend on final nodes.
+       * 
+ * + * optional bool is_final = 7; + */ + public Builder setIsFinal(boolean value) { + + isFinal_ = value; + onChanged(); + return this; + } + /** + *
+       * If true, the output is permanent: it can't be discarded, because this
+       * node is part of the "final output". Nodes may depend on final nodes.
+       * 
+ * + * optional bool is_final = 7; + */ + public Builder clearIsFinal() { + + isFinal_ = false; + onChanged(); + return this; + } + + private java.util.List controlInput_ = java.util.Collections.emptyList(); + private void ensureControlInputIsMutable() { + if (!((bitField0_ & 0x00000100) == 0x00000100)) { + controlInput_ = new java.util.ArrayList(controlInput_); + bitField0_ |= 0x00000100; + } + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public java.util.List + getControlInputList() { + return java.util.Collections.unmodifiableList(controlInput_); + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public int getControlInputCount() { + return controlInput_.size(); + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public int getControlInput(int index) { + return controlInput_.get(index); + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public Builder setControlInput( + int index, int value) { + ensureControlInputIsMutable(); + controlInput_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public Builder addControlInput(int value) { + ensureControlInputIsMutable(); + controlInput_.add(value); + onChanged(); + return this; + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public Builder addAllControlInput( + java.lang.Iterable values) { + ensureControlInputIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, controlInput_); + onChanged(); + return this; + } + /** + *
+       * Ids of the control inputs for this node.
+       * 
+ * + * repeated int32 control_input = 8; + */ + public Builder clearControlInput() { + controlInput_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000100); + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.CostGraphDef.Node) + } + + // @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef.Node) + private static final org.tensorflow.framework.CostGraphDef.Node DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.CostGraphDef.Node(); + } + + public static org.tensorflow.framework.CostGraphDef.Node getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Node parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Node(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.CostGraphDef.Node getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public static final int NODE_FIELD_NUMBER = 1; + private java.util.List node_; + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public java.util.List getNodeList() { + return node_; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public java.util.List + getNodeOrBuilderList() { + return node_; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public int getNodeCount() 
{ + return node_.size(); + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public org.tensorflow.framework.CostGraphDef.Node getNode(int index) { + return node_.get(index); + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public org.tensorflow.framework.CostGraphDef.NodeOrBuilder getNodeOrBuilder( + int index) { + return node_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < node_.size(); i++) { + output.writeMessage(1, node_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < node_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, node_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.CostGraphDef)) { + return super.equals(obj); + } + org.tensorflow.framework.CostGraphDef other = (org.tensorflow.framework.CostGraphDef) obj; + + boolean result = true; + result = result && getNodeList() + .equals(other.getNodeList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getNodeCount() > 0) { + hash = (37 * hash) + NODE_FIELD_NUMBER; + hash = (53 * hash) + getNodeList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; 
+ } + + public static org.tensorflow.framework.CostGraphDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.CostGraphDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.CostGraphDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + 
public static org.tensorflow.framework.CostGraphDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.CostGraphDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.CostGraphDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.CostGraphDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.CostGraphDef) + org.tensorflow.framework.CostGraphDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.CostGraphDef.class, org.tensorflow.framework.CostGraphDef.Builder.class); + } + + 
// Construct using org.tensorflow.framework.CostGraphDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getNodeFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (nodeBuilder_ == null) { + node_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + nodeBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.CostGraphProtos.internal_static_tensorflow_CostGraphDef_descriptor; + } + + public org.tensorflow.framework.CostGraphDef getDefaultInstanceForType() { + return org.tensorflow.framework.CostGraphDef.getDefaultInstance(); + } + + public org.tensorflow.framework.CostGraphDef build() { + org.tensorflow.framework.CostGraphDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.CostGraphDef buildPartial() { + org.tensorflow.framework.CostGraphDef result = new org.tensorflow.framework.CostGraphDef(this); + int from_bitField0_ = bitField0_; + if (nodeBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + node_ = java.util.Collections.unmodifiableList(node_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.node_ = node_; + } else { + result.node_ = nodeBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + 
com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.CostGraphDef) { + return mergeFrom((org.tensorflow.framework.CostGraphDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.CostGraphDef other) { + if (other == org.tensorflow.framework.CostGraphDef.getDefaultInstance()) return this; + if (nodeBuilder_ == null) { + if (!other.node_.isEmpty()) { + if (node_.isEmpty()) { + node_ = other.node_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureNodeIsMutable(); + node_.addAll(other.node_); + } + onChanged(); + } + } else { + if (!other.node_.isEmpty()) { + if (nodeBuilder_.isEmpty()) { + nodeBuilder_.dispose(); + nodeBuilder_ = null; + node_ = other.node_; + bitField0_ = (bitField0_ & ~0x00000001); + nodeBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getNodeFieldBuilder() : null; + } else { + nodeBuilder_.addAllMessages(other.node_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.CostGraphDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.CostGraphDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List node_ = + java.util.Collections.emptyList(); + private void ensureNodeIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + node_ = new java.util.ArrayList(node_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node, org.tensorflow.framework.CostGraphDef.Node.Builder, org.tensorflow.framework.CostGraphDef.NodeOrBuilder> nodeBuilder_; + + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public java.util.List getNodeList() { + if (nodeBuilder_ == null) { + return java.util.Collections.unmodifiableList(node_); + } else { + return nodeBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public int getNodeCount() { + if (nodeBuilder_ == null) { + return node_.size(); + } else { + return nodeBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public org.tensorflow.framework.CostGraphDef.Node getNode(int index) { + if (nodeBuilder_ == null) { + return node_.get(index); + } else { + return nodeBuilder_.getMessage(index); + } + } + /** + * repeated 
.tensorflow.CostGraphDef.Node node = 1; + */ + public Builder setNode( + int index, org.tensorflow.framework.CostGraphDef.Node value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.set(index, value); + onChanged(); + } else { + nodeBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder setNode( + int index, org.tensorflow.framework.CostGraphDef.Node.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.set(index, builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder addNode(org.tensorflow.framework.CostGraphDef.Node value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.add(value); + onChanged(); + } else { + nodeBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder addNode( + int index, org.tensorflow.framework.CostGraphDef.Node value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.add(index, value); + onChanged(); + } else { + nodeBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder addNode( + org.tensorflow.framework.CostGraphDef.Node.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.add(builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder addNode( + int index, 
org.tensorflow.framework.CostGraphDef.Node.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.add(index, builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder addAllNode( + java.lang.Iterable values) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, node_); + onChanged(); + } else { + nodeBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder clearNode() { + if (nodeBuilder_ == null) { + node_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + nodeBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public Builder removeNode(int index) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.remove(index); + onChanged(); + } else { + nodeBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public org.tensorflow.framework.CostGraphDef.Node.Builder getNodeBuilder( + int index) { + return getNodeFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public org.tensorflow.framework.CostGraphDef.NodeOrBuilder getNodeOrBuilder( + int index) { + if (nodeBuilder_ == null) { + return node_.get(index); } else { + return nodeBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public java.util.List + getNodeOrBuilderList() { + if (nodeBuilder_ != null) { + return nodeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(node_); + } + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + 
public org.tensorflow.framework.CostGraphDef.Node.Builder addNodeBuilder() { + return getNodeFieldBuilder().addBuilder( + org.tensorflow.framework.CostGraphDef.Node.getDefaultInstance()); + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public org.tensorflow.framework.CostGraphDef.Node.Builder addNodeBuilder( + int index) { + return getNodeFieldBuilder().addBuilder( + index, org.tensorflow.framework.CostGraphDef.Node.getDefaultInstance()); + } + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + public java.util.List + getNodeBuilderList() { + return getNodeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node, org.tensorflow.framework.CostGraphDef.Node.Builder, org.tensorflow.framework.CostGraphDef.NodeOrBuilder> + getNodeFieldBuilder() { + if (nodeBuilder_ == null) { + nodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.CostGraphDef.Node, org.tensorflow.framework.CostGraphDef.Node.Builder, org.tensorflow.framework.CostGraphDef.NodeOrBuilder>( + node_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + node_ = null; + } + return nodeBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.CostGraphDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.CostGraphDef) + private static final org.tensorflow.framework.CostGraphDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.CostGraphDef(); + } + + public static org.tensorflow.framework.CostGraphDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + public CostGraphDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CostGraphDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.CostGraphDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDefOrBuilder.java new file mode 100644 index 0000000000000..0d0fa13673f3d --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphDefOrBuilder.java @@ -0,0 +1,33 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/cost_graph.proto + +package org.tensorflow.framework; + +public interface CostGraphDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.CostGraphDef) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + java.util.List + getNodeList(); + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + org.tensorflow.framework.CostGraphDef.Node getNode(int index); + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + int getNodeCount(); + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + java.util.List + getNodeOrBuilderList(); + /** + * repeated .tensorflow.CostGraphDef.Node node = 1; + */ + org.tensorflow.framework.CostGraphDef.NodeOrBuilder getNodeOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphProtos.java new file mode 100644 index 0000000000000..422d6f55aa757 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/CostGraphProtos.java @@ -0,0 +1,109 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/cost_graph.proto + +package org.tensorflow.framework; + +public final class CostGraphProtos { + private CostGraphProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_CostGraphDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_CostGraphDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_CostGraphDef_Node_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_CostGraphDef_Node_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_CostGraphDef_Node_InputInfo_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_CostGraphDef_Node_InputInfo_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_CostGraphDef_Node_OutputInfo_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_CostGraphDef_Node_OutputInfo_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n*tensorflow/core/framework/cost_graph.p" + + "roto\022\ntensorflow\032,tensorflow/core/framew" + + "ork/tensor_shape.proto\032%tensorflow/core/" + + "framework/types.proto\"\216\004\n\014CostGraphDef\022+" + + "\n\004node\030\001 
\003(\0132\035.tensorflow.CostGraphDef.N" + + "ode\032\320\003\n\004Node\022\014\n\004name\030\001 \001(\t\022\016\n\006device\030\002 \001" + + "(\t\022\n\n\002id\030\003 \001(\005\022;\n\ninput_info\030\004 \003(\0132\'.ten" + + "sorflow.CostGraphDef.Node.InputInfo\022=\n\013o" + + "utput_info\030\005 \003(\0132(.tensorflow.CostGraphD" + + "ef.Node.OutputInfo\022\035\n\025temporary_memory_s", + "ize\030\006 \001(\003\022\024\n\014compute_cost\030\t \001(\003\022\020\n\010is_fi" + + "nal\030\007 \001(\010\022\025\n\rcontrol_input\030\010 \003(\005\032;\n\tInpu" + + "tInfo\022\026\n\016preceding_node\030\001 \001(\005\022\026\n\016precedi" + + "ng_port\030\002 \001(\005\032\206\001\n\nOutputInfo\022\014\n\004size\030\001 \001" + + "(\003\022\030\n\020alias_input_port\030\002 \001(\003\022+\n\005shape\030\003 " + + "\001(\0132\034.tensorflow.TensorShapeProto\022#\n\005dty" + + "pe\030\004 \001(\0162\024.tensorflow.DataTypeB0\n\030org.te" + + "nsorflow.frameworkB\017CostGraphProtosP\001\370\001\001" + + "b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.TensorShapeProtos.getDescriptor(), + org.tensorflow.framework.TypesProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_CostGraphDef_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_CostGraphDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_CostGraphDef_descriptor, + new java.lang.String[] { "Node", }); + internal_static_tensorflow_CostGraphDef_Node_descriptor = + internal_static_tensorflow_CostGraphDef_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_CostGraphDef_Node_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_CostGraphDef_Node_descriptor, + new java.lang.String[] { "Name", "Device", "Id", "InputInfo", "OutputInfo", "TemporaryMemorySize", "ComputeCost", "IsFinal", "ControlInput", }); + internal_static_tensorflow_CostGraphDef_Node_InputInfo_descriptor = + internal_static_tensorflow_CostGraphDef_Node_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_CostGraphDef_Node_InputInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_CostGraphDef_Node_InputInfo_descriptor, + new java.lang.String[] { "PrecedingNode", "PrecedingPort", }); + internal_static_tensorflow_CostGraphDef_Node_OutputInfo_descriptor = + internal_static_tensorflow_CostGraphDef_Node_descriptor.getNestedTypes().get(1); + internal_static_tensorflow_CostGraphDef_Node_OutputInfo_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_CostGraphDef_Node_OutputInfo_descriptor, + new java.lang.String[] { "Size", "AliasInputPort", "Shape", "Dtype", }); + org.tensorflow.framework.TensorShapeProtos.getDescriptor(); + org.tensorflow.framework.TypesProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DataType.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DataType.java new file mode 100644 index 0000000000000..be0155c0bb950 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DataType.java @@ -0,0 +1,553 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/types.proto + +package org.tensorflow.framework; + +/** + *
+ * LINT.IfChange
+ * 
+ * + * Protobuf enum {@code tensorflow.DataType} + */ +public enum DataType + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+   * Not a legal value for DataType.  Used to indicate a DataType field
+   * has not been set.
+   * 
+ * + * DT_INVALID = 0; + */ + DT_INVALID(0), + /** + *
+   * Data types that all computation devices are expected to be
+   * capable to support.
+   * 
+ * + * DT_FLOAT = 1; + */ + DT_FLOAT(1), + /** + * DT_DOUBLE = 2; + */ + DT_DOUBLE(2), + /** + * DT_INT32 = 3; + */ + DT_INT32(3), + /** + * DT_UINT8 = 4; + */ + DT_UINT8(4), + /** + * DT_INT16 = 5; + */ + DT_INT16(5), + /** + * DT_INT8 = 6; + */ + DT_INT8(6), + /** + * DT_STRING = 7; + */ + DT_STRING(7), + /** + *
+   * Single-precision complex
+   * 
+ * + * DT_COMPLEX64 = 8; + */ + DT_COMPLEX64(8), + /** + * DT_INT64 = 9; + */ + DT_INT64(9), + /** + * DT_BOOL = 10; + */ + DT_BOOL(10), + /** + *
+   * Quantized int8
+   * 
+ * + * DT_QINT8 = 11; + */ + DT_QINT8(11), + /** + *
+   * Quantized uint8
+   * 
+ * + * DT_QUINT8 = 12; + */ + DT_QUINT8(12), + /** + *
+   * Quantized int32
+   * 
+ * + * DT_QINT32 = 13; + */ + DT_QINT32(13), + /** + *
+   * Float32 truncated to 16 bits.  Only for cast ops.
+   * 
+ * + * DT_BFLOAT16 = 14; + */ + DT_BFLOAT16(14), + /** + *
+   * Quantized int16
+   * 
+ * + * DT_QINT16 = 15; + */ + DT_QINT16(15), + /** + *
+   * Quantized uint16
+   * 
+ * + * DT_QUINT16 = 16; + */ + DT_QUINT16(16), + /** + * DT_UINT16 = 17; + */ + DT_UINT16(17), + /** + *
+   * Double-precision complex
+   * 
+ * + * DT_COMPLEX128 = 18; + */ + DT_COMPLEX128(18), + /** + * DT_HALF = 19; + */ + DT_HALF(19), + /** + * DT_RESOURCE = 20; + */ + DT_RESOURCE(20), + /** + *
+   * Do not use!  These are only for parameters.  Every enum above
+   * should have a corresponding value below (verified by types_test).
+   * 
+ * + * DT_FLOAT_REF = 101; + */ + DT_FLOAT_REF(101), + /** + * DT_DOUBLE_REF = 102; + */ + DT_DOUBLE_REF(102), + /** + * DT_INT32_REF = 103; + */ + DT_INT32_REF(103), + /** + * DT_UINT8_REF = 104; + */ + DT_UINT8_REF(104), + /** + * DT_INT16_REF = 105; + */ + DT_INT16_REF(105), + /** + * DT_INT8_REF = 106; + */ + DT_INT8_REF(106), + /** + * DT_STRING_REF = 107; + */ + DT_STRING_REF(107), + /** + * DT_COMPLEX64_REF = 108; + */ + DT_COMPLEX64_REF(108), + /** + * DT_INT64_REF = 109; + */ + DT_INT64_REF(109), + /** + * DT_BOOL_REF = 110; + */ + DT_BOOL_REF(110), + /** + * DT_QINT8_REF = 111; + */ + DT_QINT8_REF(111), + /** + * DT_QUINT8_REF = 112; + */ + DT_QUINT8_REF(112), + /** + * DT_QINT32_REF = 113; + */ + DT_QINT32_REF(113), + /** + * DT_BFLOAT16_REF = 114; + */ + DT_BFLOAT16_REF(114), + /** + * DT_QINT16_REF = 115; + */ + DT_QINT16_REF(115), + /** + * DT_QUINT16_REF = 116; + */ + DT_QUINT16_REF(116), + /** + * DT_UINT16_REF = 117; + */ + DT_UINT16_REF(117), + /** + * DT_COMPLEX128_REF = 118; + */ + DT_COMPLEX128_REF(118), + /** + * DT_HALF_REF = 119; + */ + DT_HALF_REF(119), + /** + * DT_RESOURCE_REF = 120; + */ + DT_RESOURCE_REF(120), + UNRECOGNIZED(-1), + ; + + /** + *
+   * Not a legal value for DataType.  Used to indicate a DataType field
+   * has not been set.
+   * 
+ * + * DT_INVALID = 0; + */ + public static final int DT_INVALID_VALUE = 0; + /** + *
+   * Data types that all computation devices are expected to be
+   * capable to support.
+   * 
+ * + * DT_FLOAT = 1; + */ + public static final int DT_FLOAT_VALUE = 1; + /** + * DT_DOUBLE = 2; + */ + public static final int DT_DOUBLE_VALUE = 2; + /** + * DT_INT32 = 3; + */ + public static final int DT_INT32_VALUE = 3; + /** + * DT_UINT8 = 4; + */ + public static final int DT_UINT8_VALUE = 4; + /** + * DT_INT16 = 5; + */ + public static final int DT_INT16_VALUE = 5; + /** + * DT_INT8 = 6; + */ + public static final int DT_INT8_VALUE = 6; + /** + * DT_STRING = 7; + */ + public static final int DT_STRING_VALUE = 7; + /** + *
+   * Single-precision complex
+   * 
+ * + * DT_COMPLEX64 = 8; + */ + public static final int DT_COMPLEX64_VALUE = 8; + /** + * DT_INT64 = 9; + */ + public static final int DT_INT64_VALUE = 9; + /** + * DT_BOOL = 10; + */ + public static final int DT_BOOL_VALUE = 10; + /** + *
+   * Quantized int8
+   * 
+ * + * DT_QINT8 = 11; + */ + public static final int DT_QINT8_VALUE = 11; + /** + *
+   * Quantized uint8
+   * 
+ * + * DT_QUINT8 = 12; + */ + public static final int DT_QUINT8_VALUE = 12; + /** + *
+   * Quantized int32
+   * 
+ * + * DT_QINT32 = 13; + */ + public static final int DT_QINT32_VALUE = 13; + /** + *
+   * Float32 truncated to 16 bits.  Only for cast ops.
+   * 
+ * + * DT_BFLOAT16 = 14; + */ + public static final int DT_BFLOAT16_VALUE = 14; + /** + *
+   * Quantized int16
+   * 
+ * + * DT_QINT16 = 15; + */ + public static final int DT_QINT16_VALUE = 15; + /** + *
+   * Quantized uint16
+   * 
+ * + * DT_QUINT16 = 16; + */ + public static final int DT_QUINT16_VALUE = 16; + /** + * DT_UINT16 = 17; + */ + public static final int DT_UINT16_VALUE = 17; + /** + *
+   * Double-precision complex
+   * 
+ * + * DT_COMPLEX128 = 18; + */ + public static final int DT_COMPLEX128_VALUE = 18; + /** + * DT_HALF = 19; + */ + public static final int DT_HALF_VALUE = 19; + /** + * DT_RESOURCE = 20; + */ + public static final int DT_RESOURCE_VALUE = 20; + /** + *
+   * Do not use!  These are only for parameters.  Every enum above
+   * should have a corresponding value below (verified by types_test).
+   * 
+ * + * DT_FLOAT_REF = 101; + */ + public static final int DT_FLOAT_REF_VALUE = 101; + /** + * DT_DOUBLE_REF = 102; + */ + public static final int DT_DOUBLE_REF_VALUE = 102; + /** + * DT_INT32_REF = 103; + */ + public static final int DT_INT32_REF_VALUE = 103; + /** + * DT_UINT8_REF = 104; + */ + public static final int DT_UINT8_REF_VALUE = 104; + /** + * DT_INT16_REF = 105; + */ + public static final int DT_INT16_REF_VALUE = 105; + /** + * DT_INT8_REF = 106; + */ + public static final int DT_INT8_REF_VALUE = 106; + /** + * DT_STRING_REF = 107; + */ + public static final int DT_STRING_REF_VALUE = 107; + /** + * DT_COMPLEX64_REF = 108; + */ + public static final int DT_COMPLEX64_REF_VALUE = 108; + /** + * DT_INT64_REF = 109; + */ + public static final int DT_INT64_REF_VALUE = 109; + /** + * DT_BOOL_REF = 110; + */ + public static final int DT_BOOL_REF_VALUE = 110; + /** + * DT_QINT8_REF = 111; + */ + public static final int DT_QINT8_REF_VALUE = 111; + /** + * DT_QUINT8_REF = 112; + */ + public static final int DT_QUINT8_REF_VALUE = 112; + /** + * DT_QINT32_REF = 113; + */ + public static final int DT_QINT32_REF_VALUE = 113; + /** + * DT_BFLOAT16_REF = 114; + */ + public static final int DT_BFLOAT16_REF_VALUE = 114; + /** + * DT_QINT16_REF = 115; + */ + public static final int DT_QINT16_REF_VALUE = 115; + /** + * DT_QUINT16_REF = 116; + */ + public static final int DT_QUINT16_REF_VALUE = 116; + /** + * DT_UINT16_REF = 117; + */ + public static final int DT_UINT16_REF_VALUE = 117; + /** + * DT_COMPLEX128_REF = 118; + */ + public static final int DT_COMPLEX128_REF_VALUE = 118; + /** + * DT_HALF_REF = 119; + */ + public static final int DT_HALF_REF_VALUE = 119; + /** + * DT_RESOURCE_REF = 120; + */ + public static final int DT_RESOURCE_REF_VALUE = 120; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @deprecated Use 
{@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static DataType valueOf(int value) { + return forNumber(value); + } + + public static DataType forNumber(int value) { + switch (value) { + case 0: return DT_INVALID; + case 1: return DT_FLOAT; + case 2: return DT_DOUBLE; + case 3: return DT_INT32; + case 4: return DT_UINT8; + case 5: return DT_INT16; + case 6: return DT_INT8; + case 7: return DT_STRING; + case 8: return DT_COMPLEX64; + case 9: return DT_INT64; + case 10: return DT_BOOL; + case 11: return DT_QINT8; + case 12: return DT_QUINT8; + case 13: return DT_QINT32; + case 14: return DT_BFLOAT16; + case 15: return DT_QINT16; + case 16: return DT_QUINT16; + case 17: return DT_UINT16; + case 18: return DT_COMPLEX128; + case 19: return DT_HALF; + case 20: return DT_RESOURCE; + case 101: return DT_FLOAT_REF; + case 102: return DT_DOUBLE_REF; + case 103: return DT_INT32_REF; + case 104: return DT_UINT8_REF; + case 105: return DT_INT16_REF; + case 106: return DT_INT8_REF; + case 107: return DT_STRING_REF; + case 108: return DT_COMPLEX64_REF; + case 109: return DT_INT64_REF; + case 110: return DT_BOOL_REF; + case 111: return DT_QINT8_REF; + case 112: return DT_QUINT8_REF; + case 113: return DT_QINT32_REF; + case 114: return DT_BFLOAT16_REF; + case 115: return DT_QINT16_REF; + case 116: return DT_QUINT16_REF; + case 117: return DT_UINT16_REF; + case 118: return DT_COMPLEX128_REF; + case 119: return DT_HALF_REF; + case 120: return DT_RESOURCE_REF; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + DataType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public DataType findValueByNumber(int number) { + return DataType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return 
getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.tensorflow.framework.TypesProtos.getDescriptor() + .getEnumTypes().get(0); + } + + private static final DataType[] VALUES = values(); + + public static DataType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private DataType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:tensorflow.DataType) +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatch.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatch.java new file mode 100644 index 0000000000000..c132107573963 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatch.java @@ -0,0 +1,1134 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + *
+ * EXPERIMENTAL. Option for watching a node.
+ * 
+ * + * Protobuf type {@code tensorflow.DebugTensorWatch} + */ +public final class DebugTensorWatch extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.DebugTensorWatch) + DebugTensorWatchOrBuilder { + // Use DebugTensorWatch.newBuilder() to construct. + private DebugTensorWatch(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DebugTensorWatch() { + nodeName_ = ""; + outputSlot_ = 0; + debugOps_ = com.google.protobuf.LazyStringArrayList.EMPTY; + debugUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private DebugTensorWatch( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + nodeName_ = s; + break; + } + case 16: { + + outputSlot_ = input.readInt32(); + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + debugOps_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; + } + debugOps_.add(s); + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + debugUrls_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000008; + } + debugUrls_.add(s); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + 
throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + debugOps_ = debugOps_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + debugUrls_ = debugUrls_.getUnmodifiableView(); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_DebugTensorWatch_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_DebugTensorWatch_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DebugTensorWatch.class, org.tensorflow.framework.DebugTensorWatch.Builder.class); + } + + private int bitField0_; + public static final int NODE_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object nodeName_; + /** + *
+   * Name of the node to watch.
+   * 
+ * + * optional string node_name = 1; + */ + public java.lang.String getNodeName() { + java.lang.Object ref = nodeName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nodeName_ = s; + return s; + } + } + /** + *
+   * Name of the node to watch.
+   * 
+ * + * optional string node_name = 1; + */ + public com.google.protobuf.ByteString + getNodeNameBytes() { + java.lang.Object ref = nodeName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + nodeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OUTPUT_SLOT_FIELD_NUMBER = 2; + private int outputSlot_; + /** + *
+   * Output slot to watch.
+   * The semantics of output_slot == -1 is that the node is only watched for
+   * completion, but not for any output tensors. See NodeCompletionCallback
+   * in debug_gateway.h.
+   * TODO(cais): Implement this semantics.
+   * 
+ * + * optional int32 output_slot = 2; + */ + public int getOutputSlot() { + return outputSlot_; + } + + public static final int DEBUG_OPS_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList debugOps_; + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + public com.google.protobuf.ProtocolStringList + getDebugOpsList() { + return debugOps_; + } + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + public int getDebugOpsCount() { + return debugOps_.size(); + } + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + public java.lang.String getDebugOps(int index) { + return debugOps_.get(index); + } + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + public com.google.protobuf.ByteString + getDebugOpsBytes(int index) { + return debugOps_.getByteString(index); + } + + public static final int DEBUG_URLS_FIELD_NUMBER = 4; + private com.google.protobuf.LazyStringList debugUrls_; + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + public com.google.protobuf.ProtocolStringList + getDebugUrlsList() { + return debugUrls_; + } + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + public int getDebugUrlsCount() { + return debugUrls_.size(); + } + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + public java.lang.String getDebugUrls(int index) { + return debugUrls_.get(index); + } + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + public com.google.protobuf.ByteString + getDebugUrlsBytes(int index) { + return debugUrls_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNodeNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, nodeName_); + } + if (outputSlot_ != 0) { + output.writeInt32(2, outputSlot_); + } + for (int i = 0; i < debugOps_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, debugOps_.getRaw(i)); + } + for (int i = 0; i < debugUrls_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, debugUrls_.getRaw(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNodeNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, nodeName_); + } + if (outputSlot_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, outputSlot_); + } + { + int dataSize = 0; + for (int i = 0; i < debugOps_.size(); i++) { + dataSize += computeStringSizeNoTag(debugOps_.getRaw(i)); + } + size += dataSize; + size += 1 * getDebugOpsList().size(); + } + { + int dataSize = 0; + for (int i = 0; i < debugUrls_.size(); i++) { + dataSize += computeStringSizeNoTag(debugUrls_.getRaw(i)); + } + size += dataSize; + size += 1 * getDebugUrlsList().size(); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.tensorflow.framework.DebugTensorWatch)) { + return super.equals(obj); + } + org.tensorflow.framework.DebugTensorWatch other = (org.tensorflow.framework.DebugTensorWatch) obj; + + boolean result = true; + result = result && getNodeName() + .equals(other.getNodeName()); + result = result && (getOutputSlot() + == other.getOutputSlot()); + result = result && getDebugOpsList() + .equals(other.getDebugOpsList()); + result = result && getDebugUrlsList() + .equals(other.getDebugUrlsList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NODE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getNodeName().hashCode(); + hash = (37 * hash) + OUTPUT_SLOT_FIELD_NUMBER; + hash = (53 * hash) + getOutputSlot(); + if (getDebugOpsCount() > 0) { + hash = (37 * hash) + DEBUG_OPS_FIELD_NUMBER; + hash = (53 * hash) + getDebugOpsList().hashCode(); + } + if (getDebugUrlsCount() > 0) { + hash = (37 * hash) + DEBUG_URLS_FIELD_NUMBER; + hash = (53 * hash) + getDebugUrlsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.DebugTensorWatch parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DebugTensorWatch parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DebugTensorWatch parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DebugTensorWatch 
parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DebugTensorWatch parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DebugTensorWatch parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DebugTensorWatch parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DebugTensorWatch parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DebugTensorWatch parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DebugTensorWatch parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public 
static Builder newBuilder(org.tensorflow.framework.DebugTensorWatch prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * EXPERIMENTAL. Option for watching a node.
+   * 
+ * + * Protobuf type {@code tensorflow.DebugTensorWatch} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.DebugTensorWatch) + org.tensorflow.framework.DebugTensorWatchOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_DebugTensorWatch_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_DebugTensorWatch_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DebugTensorWatch.class, org.tensorflow.framework.DebugTensorWatch.Builder.class); + } + + // Construct using org.tensorflow.framework.DebugTensorWatch.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + nodeName_ = ""; + + outputSlot_ = 0; + + debugOps_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + debugUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_DebugTensorWatch_descriptor; + } + + public org.tensorflow.framework.DebugTensorWatch getDefaultInstanceForType() { + return org.tensorflow.framework.DebugTensorWatch.getDefaultInstance(); + } + + public org.tensorflow.framework.DebugTensorWatch build() 
{ + org.tensorflow.framework.DebugTensorWatch result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.DebugTensorWatch buildPartial() { + org.tensorflow.framework.DebugTensorWatch result = new org.tensorflow.framework.DebugTensorWatch(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.nodeName_ = nodeName_; + result.outputSlot_ = outputSlot_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + debugOps_ = debugOps_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.debugOps_ = debugOps_; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + debugUrls_ = debugUrls_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.debugUrls_ = debugUrls_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.DebugTensorWatch) { + return mergeFrom((org.tensorflow.framework.DebugTensorWatch)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.DebugTensorWatch other) { + if (other == org.tensorflow.framework.DebugTensorWatch.getDefaultInstance()) return this; + if (!other.getNodeName().isEmpty()) { + nodeName_ = other.nodeName_; + onChanged(); + } + if (other.getOutputSlot() != 0) { + setOutputSlot(other.getOutputSlot()); + } + if (!other.debugOps_.isEmpty()) { + if (debugOps_.isEmpty()) { + debugOps_ = other.debugOps_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureDebugOpsIsMutable(); + debugOps_.addAll(other.debugOps_); + } + onChanged(); + } + if (!other.debugUrls_.isEmpty()) { + if (debugUrls_.isEmpty()) { + debugUrls_ = other.debugUrls_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureDebugUrlsIsMutable(); + debugUrls_.addAll(other.debugUrls_); + } + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.DebugTensorWatch parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.DebugTensorWatch) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object nodeName_ = ""; + /** + *
+     * Name of the node to watch.
+     * 
+ * + * optional string node_name = 1; + */ + public java.lang.String getNodeName() { + java.lang.Object ref = nodeName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nodeName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the node to watch.
+     * 
+ * + * optional string node_name = 1; + */ + public com.google.protobuf.ByteString + getNodeNameBytes() { + java.lang.Object ref = nodeName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + nodeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the node to watch.
+     * 
+ * + * optional string node_name = 1; + */ + public Builder setNodeName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + nodeName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the node to watch.
+     * 
+ * + * optional string node_name = 1; + */ + public Builder clearNodeName() { + + nodeName_ = getDefaultInstance().getNodeName(); + onChanged(); + return this; + } + /** + *
+     * Name of the node to watch.
+     * 
+ * + * optional string node_name = 1; + */ + public Builder setNodeNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + nodeName_ = value; + onChanged(); + return this; + } + + private int outputSlot_ ; + /** + *
+     * Output slot to watch.
+     * The semantics of output_slot == -1 is that the node is only watched for
+     * completion, but not for any output tensors. See NodeCompletionCallback
+     * in debug_gateway.h.
+     * TODO(cais): Implement this semantics.
+     * 
+ * + * optional int32 output_slot = 2; + */ + public int getOutputSlot() { + return outputSlot_; + } + /** + *
+     * Output slot to watch.
+     * The semantics of output_slot == -1 is that the node is only watched for
+     * completion, but not for any output tensors. See NodeCompletionCallback
+     * in debug_gateway.h.
+     * TODO(cais): Implement this semantics.
+     * 
+ * + * optional int32 output_slot = 2; + */ + public Builder setOutputSlot(int value) { + + outputSlot_ = value; + onChanged(); + return this; + } + /** + *
+     * Output slot to watch.
+     * The semantics of output_slot == -1 is that the node is only watched for
+     * completion, but not for any output tensors. See NodeCompletionCallback
+     * in debug_gateway.h.
+     * TODO(cais): Implement this semantics.
+     * 
+ * + * optional int32 output_slot = 2; + */ + public Builder clearOutputSlot() { + + outputSlot_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList debugOps_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureDebugOpsIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + debugOps_ = new com.google.protobuf.LazyStringArrayList(debugOps_); + bitField0_ |= 0x00000004; + } + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public com.google.protobuf.ProtocolStringList + getDebugOpsList() { + return debugOps_.getUnmodifiableView(); + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public int getDebugOpsCount() { + return debugOps_.size(); + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public java.lang.String getDebugOps(int index) { + return debugOps_.get(index); + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public com.google.protobuf.ByteString + getDebugOpsBytes(int index) { + return debugOps_.getByteString(index); + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public Builder setDebugOps( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugOpsIsMutable(); + debugOps_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public Builder addDebugOps( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugOpsIsMutable(); + debugOps_.add(value); + onChanged(); + return this; + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public Builder addAllDebugOps( + java.lang.Iterable values) { + ensureDebugOpsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, debugOps_); + onChanged(); + return this; + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public Builder clearDebugOps() { + debugOps_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+     * Name(s) of the debugging op(s).
+     * One or more than one probes on a tensor.
+     * e.g., {"DebugIdentity", "DebugNanCount"}
+     * 
+ * + * repeated string debug_ops = 3; + */ + public Builder addDebugOpsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureDebugOpsIsMutable(); + debugOps_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList debugUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureDebugUrlsIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + debugUrls_ = new com.google.protobuf.LazyStringArrayList(debugUrls_); + bitField0_ |= 0x00000008; + } + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public com.google.protobuf.ProtocolStringList + getDebugUrlsList() { + return debugUrls_.getUnmodifiableView(); + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public int getDebugUrlsCount() { + return debugUrls_.size(); + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public java.lang.String getDebugUrls(int index) { + return debugUrls_.get(index); + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public com.google.protobuf.ByteString + getDebugUrlsBytes(int index) { + return debugUrls_.getByteString(index); + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public Builder setDebugUrls( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugUrlsIsMutable(); + debugUrls_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public Builder addDebugUrls( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugUrlsIsMutable(); + debugUrls_.add(value); + onChanged(); + return this; + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public Builder addAllDebugUrls( + java.lang.Iterable values) { + ensureDebugUrlsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, debugUrls_); + onChanged(); + return this; + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public Builder clearDebugUrls() { + debugUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+     * URL(s) for debug targets(s).
+     *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+     * Each debug op listed in debug_ops will publish its output tensor (debug
+     * signal) to all URLs in debug_urls.
+     * 
+ * + * repeated string debug_urls = 4; + */ + public Builder addDebugUrlsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureDebugUrlsIsMutable(); + debugUrls_.add(value); + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.DebugTensorWatch) + } + + // @@protoc_insertion_point(class_scope:tensorflow.DebugTensorWatch) + private static final org.tensorflow.framework.DebugTensorWatch DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.DebugTensorWatch(); + } + + public static org.tensorflow.framework.DebugTensorWatch getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DebugTensorWatch parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DebugTensorWatch(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.DebugTensorWatch getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatchOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatchOrBuilder.java new file mode 100644 index 0000000000000..d74eedb339dde --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DebugTensorWatchOrBuilder.java @@ -0,0 +1,130 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface DebugTensorWatchOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.DebugTensorWatch) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Name of the node to watch.
+   * 
+ * + * optional string node_name = 1; + */ + java.lang.String getNodeName(); + /** + *
+   * Name of the node to watch.
+   * 
+ * + * optional string node_name = 1; + */ + com.google.protobuf.ByteString + getNodeNameBytes(); + + /** + *
+   * Output slot to watch.
+   * The semantics of output_slot == -1 is that the node is only watched for
+   * completion, but not for any output tensors. See NodeCompletionCallback
+   * in debug_gateway.h.
+   * TODO(cais): Implement this semantics.
+   * 
+ * + * optional int32 output_slot = 2; + */ + int getOutputSlot(); + + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + java.util.List + getDebugOpsList(); + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + int getDebugOpsCount(); + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + java.lang.String getDebugOps(int index); + /** + *
+   * Name(s) of the debugging op(s).
+   * One or more than one probes on a tensor.
+   * e.g., {"DebugIdentity", "DebugNanCount"}
+   * 
+ * + * repeated string debug_ops = 3; + */ + com.google.protobuf.ByteString + getDebugOpsBytes(int index); + + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + java.util.List + getDebugUrlsList(); + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + int getDebugUrlsCount(); + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + java.lang.String getDebugUrls(int index); + /** + *
+   * URL(s) for debug targets(s).
+   *   E.g., "file:///foo/tfdbg_dump", "grpc://localhost:11011"
+   * Each debug op listed in debug_ops will publish its output tensor (debug
+   * signal) to all URLs in debug_urls.
+   * 
+ * + * repeated string debug_urls = 4; + */ + com.google.protobuf.ByteString + getDebugUrlsBytes(int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributes.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributes.java new file mode 100644 index 0000000000000..bd6b86ff3894b --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributes.java @@ -0,0 +1,1227 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/device_attributes.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.DeviceAttributes} + */ +public final class DeviceAttributes extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.DeviceAttributes) + DeviceAttributesOrBuilder { + // Use DeviceAttributes.newBuilder() to construct. 
+ private DeviceAttributes(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DeviceAttributes() { + name_ = ""; + deviceType_ = ""; + memoryLimit_ = 0L; + incarnation_ = 0L; + physicalDeviceDesc_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private DeviceAttributes( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + deviceType_ = s; + break; + } + case 32: { + + memoryLimit_ = input.readInt64(); + break; + } + case 42: { + org.tensorflow.framework.DeviceLocality.Builder subBuilder = null; + if (locality_ != null) { + subBuilder = locality_.toBuilder(); + } + locality_ = input.readMessage(org.tensorflow.framework.DeviceLocality.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(locality_); + locality_ = subBuilder.buildPartial(); + } + + break; + } + case 49: { + + incarnation_ = input.readFixed64(); + break; + } + case 58: { + java.lang.String s = input.readStringRequireUtf8(); + + physicalDeviceDesc_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceAttributes_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceAttributes_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DeviceAttributes.class, org.tensorflow.framework.DeviceAttributes.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Fully specified name of the device within a cluster.
+   * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Fully specified name of the device within a cluster.
+   * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DEVICE_TYPE_FIELD_NUMBER = 2; + private volatile java.lang.Object deviceType_; + /** + *
+   * String representation of device_type.
+   * 
+ * + * optional string device_type = 2; + */ + public java.lang.String getDeviceType() { + java.lang.Object ref = deviceType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + deviceType_ = s; + return s; + } + } + /** + *
+   * String representation of device_type.
+   * 
+ * + * optional string device_type = 2; + */ + public com.google.protobuf.ByteString + getDeviceTypeBytes() { + java.lang.Object ref = deviceType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + deviceType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int MEMORY_LIMIT_FIELD_NUMBER = 4; + private long memoryLimit_; + /** + *
+   * Memory capacity of device in bytes.
+   * 
+ * + * optional int64 memory_limit = 4; + */ + public long getMemoryLimit() { + return memoryLimit_; + } + + public static final int LOCALITY_FIELD_NUMBER = 5; + private org.tensorflow.framework.DeviceLocality locality_; + /** + *
+   * Platform-specific data about device that may be useful
+   * for supporting efficient data transfers.
+   * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public boolean hasLocality() { + return locality_ != null; + } + /** + *
+   * Platform-specific data about device that may be useful
+   * for supporting efficient data transfers.
+   * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public org.tensorflow.framework.DeviceLocality getLocality() { + return locality_ == null ? org.tensorflow.framework.DeviceLocality.getDefaultInstance() : locality_; + } + /** + *
+   * Platform-specific data about device that may be useful
+   * for supporting efficient data transfers.
+   * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public org.tensorflow.framework.DeviceLocalityOrBuilder getLocalityOrBuilder() { + return getLocality(); + } + + public static final int INCARNATION_FIELD_NUMBER = 6; + private long incarnation_; + /** + *
+   * A device is assigned a global unique number each time it is
+   * initialized. "incarnation" should never be 0.
+   * 
+ * + * optional fixed64 incarnation = 6; + */ + public long getIncarnation() { + return incarnation_; + } + + public static final int PHYSICAL_DEVICE_DESC_FIELD_NUMBER = 7; + private volatile java.lang.Object physicalDeviceDesc_; + /** + *
+   * String representation of the physical device that this device maps to.
+   * 
+ * + * optional string physical_device_desc = 7; + */ + public java.lang.String getPhysicalDeviceDesc() { + java.lang.Object ref = physicalDeviceDesc_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + physicalDeviceDesc_ = s; + return s; + } + } + /** + *
+   * String representation of the physical device that this device maps to.
+   * 
+ * + * optional string physical_device_desc = 7; + */ + public com.google.protobuf.ByteString + getPhysicalDeviceDescBytes() { + java.lang.Object ref = physicalDeviceDesc_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + physicalDeviceDesc_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getDeviceTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, deviceType_); + } + if (memoryLimit_ != 0L) { + output.writeInt64(4, memoryLimit_); + } + if (locality_ != null) { + output.writeMessage(5, getLocality()); + } + if (incarnation_ != 0L) { + output.writeFixed64(6, incarnation_); + } + if (!getPhysicalDeviceDescBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, physicalDeviceDesc_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (!getDeviceTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, deviceType_); + } + if (memoryLimit_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(4, memoryLimit_); + } + if (locality_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getLocality()); + } + if (incarnation_ != 
0L) { + size += com.google.protobuf.CodedOutputStream + .computeFixed64Size(6, incarnation_); + } + if (!getPhysicalDeviceDescBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, physicalDeviceDesc_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.DeviceAttributes)) { + return super.equals(obj); + } + org.tensorflow.framework.DeviceAttributes other = (org.tensorflow.framework.DeviceAttributes) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getDeviceType() + .equals(other.getDeviceType()); + result = result && (getMemoryLimit() + == other.getMemoryLimit()); + result = result && (hasLocality() == other.hasLocality()); + if (hasLocality()) { + result = result && getLocality() + .equals(other.getLocality()); + } + result = result && (getIncarnation() + == other.getIncarnation()); + result = result && getPhysicalDeviceDesc() + .equals(other.getPhysicalDeviceDesc()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + DEVICE_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getDeviceType().hashCode(); + hash = (37 * hash) + MEMORY_LIMIT_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMemoryLimit()); + if (hasLocality()) { + hash = (37 * hash) + LOCALITY_FIELD_NUMBER; + hash = (53 * hash) + getLocality().hashCode(); + } + hash = (37 * hash) + INCARNATION_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getIncarnation()); + hash = (37 * hash) + 
PHYSICAL_DEVICE_DESC_FIELD_NUMBER; + hash = (53 * hash) + getPhysicalDeviceDesc().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.DeviceAttributes parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DeviceAttributes parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceAttributes parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceAttributes parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.DeviceAttributes prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.DeviceAttributes} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.DeviceAttributes) + org.tensorflow.framework.DeviceAttributesOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceAttributes_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceAttributes_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DeviceAttributes.class, org.tensorflow.framework.DeviceAttributes.Builder.class); + } + + // Construct using org.tensorflow.framework.DeviceAttributes.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + deviceType_ = ""; + + memoryLimit_ = 0L; + + if (localityBuilder_ == null) { + locality_ = null; + } else { + locality_ = null; + localityBuilder_ = null; + } + incarnation_ = 0L; + + physicalDeviceDesc_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceAttributes_descriptor; + } + + public org.tensorflow.framework.DeviceAttributes getDefaultInstanceForType() { + return org.tensorflow.framework.DeviceAttributes.getDefaultInstance(); + } + + public org.tensorflow.framework.DeviceAttributes build() { + org.tensorflow.framework.DeviceAttributes result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.DeviceAttributes buildPartial() { + org.tensorflow.framework.DeviceAttributes result = new org.tensorflow.framework.DeviceAttributes(this); + result.name_ = name_; + result.deviceType_ = deviceType_; + result.memoryLimit_ = memoryLimit_; + if (localityBuilder_ == null) { + result.locality_ = locality_; + } else { + result.locality_ = localityBuilder_.build(); + } + result.incarnation_ = incarnation_; + result.physicalDeviceDesc_ = physicalDeviceDesc_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
org.tensorflow.framework.DeviceAttributes) { + return mergeFrom((org.tensorflow.framework.DeviceAttributes)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.DeviceAttributes other) { + if (other == org.tensorflow.framework.DeviceAttributes.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getDeviceType().isEmpty()) { + deviceType_ = other.deviceType_; + onChanged(); + } + if (other.getMemoryLimit() != 0L) { + setMemoryLimit(other.getMemoryLimit()); + } + if (other.hasLocality()) { + mergeLocality(other.getLocality()); + } + if (other.getIncarnation() != 0L) { + setIncarnation(other.getIncarnation()); + } + if (!other.getPhysicalDeviceDesc().isEmpty()) { + physicalDeviceDesc_ = other.physicalDeviceDesc_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.DeviceAttributes parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.DeviceAttributes) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+     * Fully specified name of the device within a cluster.
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Fully specified name of the device within a cluster.
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Fully specified name of the device within a cluster.
+     * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Fully specified name of the device within a cluster.
+     * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Fully specified name of the device within a cluster.
+     * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object deviceType_ = ""; + /** + *
+     * String representation of device_type.
+     * 
+ * + * optional string device_type = 2; + */ + public java.lang.String getDeviceType() { + java.lang.Object ref = deviceType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + deviceType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * String representation of device_type.
+     * 
+ * + * optional string device_type = 2; + */ + public com.google.protobuf.ByteString + getDeviceTypeBytes() { + java.lang.Object ref = deviceType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + deviceType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * String representation of device_type.
+     * 
+ * + * optional string device_type = 2; + */ + public Builder setDeviceType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + deviceType_ = value; + onChanged(); + return this; + } + /** + *
+     * String representation of device_type.
+     * 
+ * + * optional string device_type = 2; + */ + public Builder clearDeviceType() { + + deviceType_ = getDefaultInstance().getDeviceType(); + onChanged(); + return this; + } + /** + *
+     * String representation of device_type.
+     * 
+ * + * optional string device_type = 2; + */ + public Builder setDeviceTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + deviceType_ = value; + onChanged(); + return this; + } + + private long memoryLimit_ ; + /** + *
+     * Memory capacity of device in bytes.
+     * 
+ * + * optional int64 memory_limit = 4; + */ + public long getMemoryLimit() { + return memoryLimit_; + } + /** + *
+     * Memory capacity of device in bytes.
+     * 
+ * + * optional int64 memory_limit = 4; + */ + public Builder setMemoryLimit(long value) { + + memoryLimit_ = value; + onChanged(); + return this; + } + /** + *
+     * Memory capacity of device in bytes.
+     * 
+ * + * optional int64 memory_limit = 4; + */ + public Builder clearMemoryLimit() { + + memoryLimit_ = 0L; + onChanged(); + return this; + } + + private org.tensorflow.framework.DeviceLocality locality_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.DeviceLocality, org.tensorflow.framework.DeviceLocality.Builder, org.tensorflow.framework.DeviceLocalityOrBuilder> localityBuilder_; + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public boolean hasLocality() { + return localityBuilder_ != null || locality_ != null; + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public org.tensorflow.framework.DeviceLocality getLocality() { + if (localityBuilder_ == null) { + return locality_ == null ? org.tensorflow.framework.DeviceLocality.getDefaultInstance() : locality_; + } else { + return localityBuilder_.getMessage(); + } + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public Builder setLocality(org.tensorflow.framework.DeviceLocality value) { + if (localityBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + locality_ = value; + onChanged(); + } else { + localityBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public Builder setLocality( + org.tensorflow.framework.DeviceLocality.Builder builderForValue) { + if (localityBuilder_ == null) { + locality_ = builderForValue.build(); + onChanged(); + } else { + localityBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public Builder mergeLocality(org.tensorflow.framework.DeviceLocality value) { + if (localityBuilder_ == null) { + if (locality_ != null) { + locality_ = + org.tensorflow.framework.DeviceLocality.newBuilder(locality_).mergeFrom(value).buildPartial(); + } else { + locality_ = value; + } + onChanged(); + } else { + localityBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public Builder clearLocality() { + if (localityBuilder_ == null) { + locality_ = null; + onChanged(); + } else { + locality_ = null; + localityBuilder_ = null; + } + + return this; + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public org.tensorflow.framework.DeviceLocality.Builder getLocalityBuilder() { + + onChanged(); + return getLocalityFieldBuilder().getBuilder(); + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + public org.tensorflow.framework.DeviceLocalityOrBuilder getLocalityOrBuilder() { + if (localityBuilder_ != null) { + return localityBuilder_.getMessageOrBuilder(); + } else { + return locality_ == null ? + org.tensorflow.framework.DeviceLocality.getDefaultInstance() : locality_; + } + } + /** + *
+     * Platform-specific data about device that may be useful
+     * for supporting efficient data transfers.
+     * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.DeviceLocality, org.tensorflow.framework.DeviceLocality.Builder, org.tensorflow.framework.DeviceLocalityOrBuilder> + getLocalityFieldBuilder() { + if (localityBuilder_ == null) { + localityBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.DeviceLocality, org.tensorflow.framework.DeviceLocality.Builder, org.tensorflow.framework.DeviceLocalityOrBuilder>( + getLocality(), + getParentForChildren(), + isClean()); + locality_ = null; + } + return localityBuilder_; + } + + private long incarnation_ ; + /** + *
+     * A device is assigned a global unique number each time it is
+     * initialized. "incarnation" should never be 0.
+     * 
+ * + * optional fixed64 incarnation = 6; + */ + public long getIncarnation() { + return incarnation_; + } + /** + *
+     * A device is assigned a global unique number each time it is
+     * initialized. "incarnation" should never be 0.
+     * 
+ * + * optional fixed64 incarnation = 6; + */ + public Builder setIncarnation(long value) { + + incarnation_ = value; + onChanged(); + return this; + } + /** + *
+     * A device is assigned a global unique number each time it is
+     * initialized. "incarnation" should never be 0.
+     * 
+ * + * optional fixed64 incarnation = 6; + */ + public Builder clearIncarnation() { + + incarnation_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object physicalDeviceDesc_ = ""; + /** + *
+     * String representation of the physical device that this device maps to.
+     * 
+ * + * optional string physical_device_desc = 7; + */ + public java.lang.String getPhysicalDeviceDesc() { + java.lang.Object ref = physicalDeviceDesc_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + physicalDeviceDesc_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * String representation of the physical device that this device maps to.
+     * 
+ * + * optional string physical_device_desc = 7; + */ + public com.google.protobuf.ByteString + getPhysicalDeviceDescBytes() { + java.lang.Object ref = physicalDeviceDesc_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + physicalDeviceDesc_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * String representation of the physical device that this device maps to.
+     * 
+ * + * optional string physical_device_desc = 7; + */ + public Builder setPhysicalDeviceDesc( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + physicalDeviceDesc_ = value; + onChanged(); + return this; + } + /** + *
+     * String representation of the physical device that this device maps to.
+     * 
+ * + * optional string physical_device_desc = 7; + */ + public Builder clearPhysicalDeviceDesc() { + + physicalDeviceDesc_ = getDefaultInstance().getPhysicalDeviceDesc(); + onChanged(); + return this; + } + /** + *
+     * String representation of the physical device that this device maps to.
+     * 
+ * + * optional string physical_device_desc = 7; + */ + public Builder setPhysicalDeviceDescBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + physicalDeviceDesc_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.DeviceAttributes) + } + + // @@protoc_insertion_point(class_scope:tensorflow.DeviceAttributes) + private static final org.tensorflow.framework.DeviceAttributes DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.DeviceAttributes(); + } + + public static org.tensorflow.framework.DeviceAttributes getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeviceAttributes parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeviceAttributes(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.DeviceAttributes getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesOrBuilder.java new file mode 100644 index 0000000000000..199810ebc2a6a --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesOrBuilder.java @@ -0,0 +1,110 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/device_attributes.proto + +package org.tensorflow.framework; + +public interface DeviceAttributesOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.DeviceAttributes) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Fully specified name of the device within a cluster.
+   * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Fully specified name of the device within a cluster.
+   * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * String representation of device_type.
+   * 
+ * + * optional string device_type = 2; + */ + java.lang.String getDeviceType(); + /** + *
+   * String representation of device_type.
+   * 
+ * + * optional string device_type = 2; + */ + com.google.protobuf.ByteString + getDeviceTypeBytes(); + + /** + *
+   * Memory capacity of device in bytes.
+   * 
+ * + * optional int64 memory_limit = 4; + */ + long getMemoryLimit(); + + /** + *
+   * Platform-specific data about device that may be useful
+   * for supporting efficient data transfers.
+   * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + boolean hasLocality(); + /** + *
+   * Platform-specific data about device that may be useful
+   * for supporting efficient data transfers.
+   * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + org.tensorflow.framework.DeviceLocality getLocality(); + /** + *
+   * Platform-specific data about device that may be useful
+   * for supporting efficient data transfers.
+   * 
+ * + * optional .tensorflow.DeviceLocality locality = 5; + */ + org.tensorflow.framework.DeviceLocalityOrBuilder getLocalityOrBuilder(); + + /** + *
+   * A device is assigned a global unique number each time it is
+   * initialized. "incarnation" should never be 0.
+   * 
+ * + * optional fixed64 incarnation = 6; + */ + long getIncarnation(); + + /** + *
+   * String representation of the physical device that this device maps to.
+   * 
+ * + * optional string physical_device_desc = 7; + */ + java.lang.String getPhysicalDeviceDesc(); + /** + *
+   * String representation of the physical device that this device maps to.
+   * 
+ * + * optional string physical_device_desc = 7; + */ + com.google.protobuf.ByteString + getPhysicalDeviceDescBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesProtos.java new file mode 100644 index 0000000000000..625584b5e7a78 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceAttributesProtos.java @@ -0,0 +1,73 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/device_attributes.proto + +package org.tensorflow.framework; + +public final class DeviceAttributesProtos { + private DeviceAttributesProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_DeviceLocality_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_DeviceLocality_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_DeviceAttributes_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_DeviceAttributes_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n1tensorflow/core/framework/device_attri" + + "butes.proto\022\ntensorflow\" \n\016DeviceLocalit" + + "y\022\016\n\006bus_id\030\001 
\001(\005\"\254\001\n\020DeviceAttributes\022\014" + + "\n\004name\030\001 \001(\t\022\023\n\013device_type\030\002 \001(\t\022\024\n\014mem" + + "ory_limit\030\004 \001(\003\022,\n\010locality\030\005 \001(\0132\032.tens" + + "orflow.DeviceLocality\022\023\n\013incarnation\030\006 \001" + + "(\006\022\034\n\024physical_device_desc\030\007 \001(\tB7\n\030org." + + "tensorflow.frameworkB\026DeviceAttributesPr" + + "otosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_tensorflow_DeviceLocality_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_DeviceLocality_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_DeviceLocality_descriptor, + new java.lang.String[] { "BusId", }); + internal_static_tensorflow_DeviceAttributes_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_DeviceAttributes_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_DeviceAttributes_descriptor, + new java.lang.String[] { "Name", "DeviceType", "MemoryLimit", "Locality", "Incarnation", "PhysicalDeviceDesc", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocality.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocality.java new file 
mode 100644 index 0000000000000..67bba71b65293 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocality.java @@ -0,0 +1,445 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/device_attributes.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.DeviceLocality} + */ +public final class DeviceLocality extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.DeviceLocality) + DeviceLocalityOrBuilder { + // Use DeviceLocality.newBuilder() to construct. + private DeviceLocality(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DeviceLocality() { + busId_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private DeviceLocality( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + busId_ = input.readInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceLocality_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceLocality_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DeviceLocality.class, org.tensorflow.framework.DeviceLocality.Builder.class); + } + + public static final int BUS_ID_FIELD_NUMBER = 1; + private int busId_; + /** + *
+   * Optional bus locality of device.  Default value of 0 means
+   * no specific locality.  Specific localities are indexed from 1.
+   * 
+ * + * optional int32 bus_id = 1; + */ + public int getBusId() { + return busId_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (busId_ != 0) { + output.writeInt32(1, busId_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (busId_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, busId_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.DeviceLocality)) { + return super.equals(obj); + } + org.tensorflow.framework.DeviceLocality other = (org.tensorflow.framework.DeviceLocality) obj; + + boolean result = true; + result = result && (getBusId() + == other.getBusId()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + BUS_ID_FIELD_NUMBER; + hash = (53 * hash) + getBusId(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.DeviceLocality parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DeviceLocality parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DeviceLocality parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DeviceLocality parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DeviceLocality parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceLocality parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DeviceLocality parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceLocality parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DeviceLocality parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceLocality parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.DeviceLocality prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.DeviceLocality} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.DeviceLocality) + org.tensorflow.framework.DeviceLocalityOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceLocality_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceLocality_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DeviceLocality.class, org.tensorflow.framework.DeviceLocality.Builder.class); + } + + // Construct using org.tensorflow.framework.DeviceLocality.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + busId_ = 0; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.DeviceAttributesProtos.internal_static_tensorflow_DeviceLocality_descriptor; + } + + public org.tensorflow.framework.DeviceLocality getDefaultInstanceForType() { + return org.tensorflow.framework.DeviceLocality.getDefaultInstance(); + } + + public org.tensorflow.framework.DeviceLocality build() { + org.tensorflow.framework.DeviceLocality result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.DeviceLocality buildPartial() { + org.tensorflow.framework.DeviceLocality result = new org.tensorflow.framework.DeviceLocality(this); + result.busId_ = busId_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.DeviceLocality) { 
+ return mergeFrom((org.tensorflow.framework.DeviceLocality)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.DeviceLocality other) { + if (other == org.tensorflow.framework.DeviceLocality.getDefaultInstance()) return this; + if (other.getBusId() != 0) { + setBusId(other.getBusId()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.DeviceLocality parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.DeviceLocality) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int busId_ ; + /** + *
+     * Optional bus locality of device.  Default value of 0 means
+     * no specific locality.  Specific localities are indexed from 1.
+     * 
+ * + * optional int32 bus_id = 1; + */ + public int getBusId() { + return busId_; + } + /** + *
+     * Optional bus locality of device.  Default value of 0 means
+     * no specific locality.  Specific localities are indexed from 1.
+     * 
+ * + * optional int32 bus_id = 1; + */ + public Builder setBusId(int value) { + + busId_ = value; + onChanged(); + return this; + } + /** + *
+     * Optional bus locality of device.  Default value of 0 means
+     * no specific locality.  Specific localities are indexed from 1.
+     * 
+ * + * optional int32 bus_id = 1; + */ + public Builder clearBusId() { + + busId_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.DeviceLocality) + } + + // @@protoc_insertion_point(class_scope:tensorflow.DeviceLocality) + private static final org.tensorflow.framework.DeviceLocality DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.DeviceLocality(); + } + + public static org.tensorflow.framework.DeviceLocality getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeviceLocality parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeviceLocality(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.DeviceLocality getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocalityOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocalityOrBuilder.java new file mode 100644 index 0000000000000..52d871822b308 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceLocalityOrBuilder.java @@ -0,0 +1,19 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/device_attributes.proto + +package org.tensorflow.framework; + +public interface DeviceLocalityOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.DeviceLocality) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Optional bus locality of device.  Default value of 0 means
+   * no specific locality.  Specific localities are indexed from 1.
+   * 
+ * + * optional int32 bus_id = 1; + */ + int getBusId(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStats.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStats.java new file mode 100644 index 0000000000000..cabab9729a426 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStats.java @@ -0,0 +1,842 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.DeviceStepStats} + */ +public final class DeviceStepStats extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.DeviceStepStats) + DeviceStepStatsOrBuilder { + // Use DeviceStepStats.newBuilder() to construct. + private DeviceStepStats(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DeviceStepStats() { + device_ = ""; + nodeStats_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private DeviceStepStats( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + device_ = s; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + nodeStats_ = new java.util.ArrayList(); + 
mutable_bitField0_ |= 0x00000002; + } + nodeStats_.add( + input.readMessage(org.tensorflow.framework.NodeExecStats.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + nodeStats_ = java.util.Collections.unmodifiableList(nodeStats_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_DeviceStepStats_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_DeviceStepStats_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DeviceStepStats.class, org.tensorflow.framework.DeviceStepStats.Builder.class); + } + + private int bitField0_; + public static final int DEVICE_FIELD_NUMBER = 1; + private volatile java.lang.Object device_; + /** + * optional string device = 1; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } + } + /** + * optional string device = 1; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) 
ref; + } + } + + public static final int NODE_STATS_FIELD_NUMBER = 2; + private java.util.List nodeStats_; + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public java.util.List getNodeStatsList() { + return nodeStats_; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public java.util.List + getNodeStatsOrBuilderList() { + return nodeStats_; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public int getNodeStatsCount() { + return nodeStats_.size(); + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStats getNodeStats(int index) { + return nodeStats_.get(index); + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStatsOrBuilder getNodeStatsOrBuilder( + int index) { + return nodeStats_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getDeviceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, device_); + } + for (int i = 0; i < nodeStats_.size(); i++) { + output.writeMessage(2, nodeStats_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getDeviceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, device_); + } + for (int i = 0; i < nodeStats_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, nodeStats_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final 
java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.DeviceStepStats)) { + return super.equals(obj); + } + org.tensorflow.framework.DeviceStepStats other = (org.tensorflow.framework.DeviceStepStats) obj; + + boolean result = true; + result = result && getDevice() + .equals(other.getDevice()); + result = result && getNodeStatsList() + .equals(other.getNodeStatsList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + DEVICE_FIELD_NUMBER; + hash = (53 * hash) + getDevice().hashCode(); + if (getNodeStatsCount() > 0) { + hash = (37 * hash) + NODE_STATS_FIELD_NUMBER; + hash = (53 * hash) + getNodeStatsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.DeviceStepStats parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom(java.io.InputStream input) + 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DeviceStepStats parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceStepStats parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.DeviceStepStats parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.DeviceStepStats prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.DeviceStepStats} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.DeviceStepStats) + org.tensorflow.framework.DeviceStepStatsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_DeviceStepStats_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_DeviceStepStats_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.DeviceStepStats.class, org.tensorflow.framework.DeviceStepStats.Builder.class); + } + + // Construct using org.tensorflow.framework.DeviceStepStats.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getNodeStatsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + device_ = ""; + + if (nodeStatsBuilder_ == null) { + nodeStats_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + nodeStatsBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_DeviceStepStats_descriptor; + } + + 
public org.tensorflow.framework.DeviceStepStats getDefaultInstanceForType() { + return org.tensorflow.framework.DeviceStepStats.getDefaultInstance(); + } + + public org.tensorflow.framework.DeviceStepStats build() { + org.tensorflow.framework.DeviceStepStats result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.DeviceStepStats buildPartial() { + org.tensorflow.framework.DeviceStepStats result = new org.tensorflow.framework.DeviceStepStats(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.device_ = device_; + if (nodeStatsBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + nodeStats_ = java.util.Collections.unmodifiableList(nodeStats_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.nodeStats_ = nodeStats_; + } else { + result.nodeStats_ = nodeStatsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof 
org.tensorflow.framework.DeviceStepStats) { + return mergeFrom((org.tensorflow.framework.DeviceStepStats)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.DeviceStepStats other) { + if (other == org.tensorflow.framework.DeviceStepStats.getDefaultInstance()) return this; + if (!other.getDevice().isEmpty()) { + device_ = other.device_; + onChanged(); + } + if (nodeStatsBuilder_ == null) { + if (!other.nodeStats_.isEmpty()) { + if (nodeStats_.isEmpty()) { + nodeStats_ = other.nodeStats_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureNodeStatsIsMutable(); + nodeStats_.addAll(other.nodeStats_); + } + onChanged(); + } + } else { + if (!other.nodeStats_.isEmpty()) { + if (nodeStatsBuilder_.isEmpty()) { + nodeStatsBuilder_.dispose(); + nodeStatsBuilder_ = null; + nodeStats_ = other.nodeStats_; + bitField0_ = (bitField0_ & ~0x00000002); + nodeStatsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getNodeStatsFieldBuilder() : null; + } else { + nodeStatsBuilder_.addAllMessages(other.nodeStats_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.DeviceStepStats parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.DeviceStepStats) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object device_ = ""; + /** + * optional string device = 1; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string device = 1; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string device = 1; + */ + public Builder setDevice( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + device_ = value; + onChanged(); + return this; + } + /** + * optional string device = 1; + */ + public Builder clearDevice() { + + device_ = getDefaultInstance().getDevice(); + onChanged(); + return this; + } + /** + * 
optional string device = 1; + */ + public Builder setDeviceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + device_ = value; + onChanged(); + return this; + } + + private java.util.List nodeStats_ = + java.util.Collections.emptyList(); + private void ensureNodeStatsIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + nodeStats_ = new java.util.ArrayList(nodeStats_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeExecStats, org.tensorflow.framework.NodeExecStats.Builder, org.tensorflow.framework.NodeExecStatsOrBuilder> nodeStatsBuilder_; + + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public java.util.List getNodeStatsList() { + if (nodeStatsBuilder_ == null) { + return java.util.Collections.unmodifiableList(nodeStats_); + } else { + return nodeStatsBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public int getNodeStatsCount() { + if (nodeStatsBuilder_ == null) { + return nodeStats_.size(); + } else { + return nodeStatsBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStats getNodeStats(int index) { + if (nodeStatsBuilder_ == null) { + return nodeStats_.get(index); + } else { + return nodeStatsBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder setNodeStats( + int index, org.tensorflow.framework.NodeExecStats value) { + if (nodeStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeStatsIsMutable(); + nodeStats_.set(index, value); + onChanged(); + } else { + nodeStatsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder 
setNodeStats( + int index, org.tensorflow.framework.NodeExecStats.Builder builderForValue) { + if (nodeStatsBuilder_ == null) { + ensureNodeStatsIsMutable(); + nodeStats_.set(index, builderForValue.build()); + onChanged(); + } else { + nodeStatsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder addNodeStats(org.tensorflow.framework.NodeExecStats value) { + if (nodeStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeStatsIsMutable(); + nodeStats_.add(value); + onChanged(); + } else { + nodeStatsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder addNodeStats( + int index, org.tensorflow.framework.NodeExecStats value) { + if (nodeStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeStatsIsMutable(); + nodeStats_.add(index, value); + onChanged(); + } else { + nodeStatsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder addNodeStats( + org.tensorflow.framework.NodeExecStats.Builder builderForValue) { + if (nodeStatsBuilder_ == null) { + ensureNodeStatsIsMutable(); + nodeStats_.add(builderForValue.build()); + onChanged(); + } else { + nodeStatsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder addNodeStats( + int index, org.tensorflow.framework.NodeExecStats.Builder builderForValue) { + if (nodeStatsBuilder_ == null) { + ensureNodeStatsIsMutable(); + nodeStats_.add(index, builderForValue.build()); + onChanged(); + } else { + nodeStatsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder 
addAllNodeStats( + java.lang.Iterable values) { + if (nodeStatsBuilder_ == null) { + ensureNodeStatsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, nodeStats_); + onChanged(); + } else { + nodeStatsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder clearNodeStats() { + if (nodeStatsBuilder_ == null) { + nodeStats_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + nodeStatsBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public Builder removeNodeStats(int index) { + if (nodeStatsBuilder_ == null) { + ensureNodeStatsIsMutable(); + nodeStats_.remove(index); + onChanged(); + } else { + nodeStatsBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStats.Builder getNodeStatsBuilder( + int index) { + return getNodeStatsFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStatsOrBuilder getNodeStatsOrBuilder( + int index) { + if (nodeStatsBuilder_ == null) { + return nodeStats_.get(index); } else { + return nodeStatsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public java.util.List + getNodeStatsOrBuilderList() { + if (nodeStatsBuilder_ != null) { + return nodeStatsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(nodeStats_); + } + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStats.Builder addNodeStatsBuilder() { + return getNodeStatsFieldBuilder().addBuilder( + org.tensorflow.framework.NodeExecStats.getDefaultInstance()); + } + /** + * repeated .tensorflow.NodeExecStats 
node_stats = 2; + */ + public org.tensorflow.framework.NodeExecStats.Builder addNodeStatsBuilder( + int index) { + return getNodeStatsFieldBuilder().addBuilder( + index, org.tensorflow.framework.NodeExecStats.getDefaultInstance()); + } + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + public java.util.List + getNodeStatsBuilderList() { + return getNodeStatsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeExecStats, org.tensorflow.framework.NodeExecStats.Builder, org.tensorflow.framework.NodeExecStatsOrBuilder> + getNodeStatsFieldBuilder() { + if (nodeStatsBuilder_ == null) { + nodeStatsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeExecStats, org.tensorflow.framework.NodeExecStats.Builder, org.tensorflow.framework.NodeExecStatsOrBuilder>( + nodeStats_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + nodeStats_ = null; + } + return nodeStatsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.DeviceStepStats) + } + + // @@protoc_insertion_point(class_scope:tensorflow.DeviceStepStats) + private static final org.tensorflow.framework.DeviceStepStats DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.DeviceStepStats(); + } + + public static org.tensorflow.framework.DeviceStepStats getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public DeviceStepStats parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new DeviceStepStats(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.DeviceStepStats getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStatsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStatsOrBuilder.java new file mode 100644 index 0000000000000..ad2fda44b8cf4 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/DeviceStepStatsOrBuilder.java @@ -0,0 +1,43 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +public interface DeviceStepStatsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.DeviceStepStats) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string device = 1; + */ + java.lang.String getDevice(); + /** + * optional string device = 1; + */ + com.google.protobuf.ByteString + getDeviceBytes(); + + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + java.util.List + getNodeStatsList(); + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + org.tensorflow.framework.NodeExecStats getNodeStats(int index); + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + int getNodeStatsCount(); + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + java.util.List + getNodeStatsOrBuilderList(); + /** + * repeated .tensorflow.NodeExecStats node_stats = 2; + */ + org.tensorflow.framework.NodeExecStatsOrBuilder getNodeStatsOrBuilder( + int index); +} diff --git 
a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDef.java new file mode 100644 index 0000000000000..c3d6ff1c896ee --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDef.java @@ -0,0 +1,4081 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +/** + *
+ * A function can be instantiated when the runtime can bind every attr
+ * with a value. When a GraphDef has a call to a function, it must
+ * have binding for every attr defined in the signature.
+ * TODO(zhifengc):
+ *   * device spec, etc.
+ * 
+ * + * Protobuf type {@code tensorflow.FunctionDef} + */ +public final class FunctionDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.FunctionDef) + FunctionDefOrBuilder { + // Use FunctionDef.newBuilder() to construct. + private FunctionDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FunctionDef() { + node_ = java.util.Collections.emptyList(); + nodeDef_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private FunctionDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + org.tensorflow.framework.OpDef.Builder subBuilder = null; + if (signature_ != null) { + subBuilder = signature_.toBuilder(); + } + signature_ = input.readMessage(org.tensorflow.framework.OpDef.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(signature_); + signature_ = subBuilder.buildPartial(); + } + + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + node_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + node_.add( + input.readMessage(org.tensorflow.framework.FunctionDef.Node.parser(), extensionRegistry)); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + nodeDef_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + nodeDef_.add( + input.readMessage(org.tensorflow.framework.NodeDef.parser(), 
extensionRegistry)); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + ret_ = com.google.protobuf.MapField.newMapField( + RetDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000010; + } + com.google.protobuf.MapEntry + ret__ = input.readMessage( + RetDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + ret_.getMutableMap().put( + ret__.getKey(), ret__.getValue()); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000002; + } + com.google.protobuf.MapEntry + attr__ = input.readMessage( + AttrDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + attr_.getMutableMap().put( + attr__.getKey(), attr__.getValue()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + node_ = java.util.Collections.unmodifiableList(node_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + nodeDef_ = java.util.Collections.unmodifiableList(nodeDef_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 5: + return internalGetAttr(); + case 4: + return internalGetRet(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.FunctionDef.class, org.tensorflow.framework.FunctionDef.Builder.class); + } + + public interface NodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.FunctionDef.Node) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + java.util.List + getRetList(); + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + int getRetCount(); + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + java.lang.String getRet(int index); + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + com.google.protobuf.ByteString + getRetBytes(int index); + + /** + *
+     * The op/function name.
+     * 
+ * + * optional string op = 2; + */ + java.lang.String getOp(); + /** + *
+     * The op/function name.
+     * 
+ * + * optional string op = 2; + */ + com.google.protobuf.ByteString + getOpBytes(); + + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + java.util.List + getArgList(); + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + int getArgCount(); + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + java.lang.String getArg(int index); + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + com.google.protobuf.ByteString + getArgBytes(int index); + + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + java.util.List + getDepList(); + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + int getDepCount(); + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + java.lang.String getDep(int index); + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + com.google.protobuf.ByteString + getDepBytes(int index); + + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + int getAttrCount(); + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + boolean containsAttr( + java.lang.String key); + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getAttr(); + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + java.util.Map + getAttrMap(); + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue); + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key); + } + /** + *
+   * A node is a multi-value assignment:
+   *   (ret[0], ret[1], ...) = func(arg[0], arg[1], ...)
+   * By convention, "func" is resolved by consulting with a user-defined
+   * library first. If not resolved, "func" is assumed to be a builtin op.
+   * 
+ * + * Protobuf type {@code tensorflow.FunctionDef.Node} + */ + public static final class Node extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.FunctionDef.Node) + NodeOrBuilder { + // Use Node.newBuilder() to construct. + private Node(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Node() { + ret_ = com.google.protobuf.LazyStringArrayList.EMPTY; + op_ = ""; + arg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + dep_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Node( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + ret_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + ret_.add(s); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + op_ = s; + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + arg_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; + } + arg_.add(s); + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + dep_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 
0x00000008; + } + dep_.add(s); + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000010; + } + com.google.protobuf.MapEntry + attr__ = input.readMessage( + AttrDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + attr_.getMutableMap().put( + attr__.getKey(), attr__.getValue()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + ret_ = ret_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + arg_ = arg_.getUnmodifiableView(); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + dep_ = dep_.getUnmodifiableView(); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_Node_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 5: + return internalGetAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_Node_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.FunctionDef.Node.class, org.tensorflow.framework.FunctionDef.Node.Builder.class); + } + + private int bitField0_; + public static final int RET_FIELD_NUMBER = 1; + private 
com.google.protobuf.LazyStringList ret_; + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + public com.google.protobuf.ProtocolStringList + getRetList() { + return ret_; + } + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + public int getRetCount() { + return ret_.size(); + } + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + public java.lang.String getRet(int index) { + return ret_.get(index); + } + /** + *
+     * This node produces multiple outputs. They are named ret[0],
+     * ret[1], ..., etc.
+     * REQUIRES: function.node.ret[*] are unique across all nodes.
+     * REQUIRES: ret.size == func/op def's number of output args.
+     * 
+ * + * repeated string ret = 1; + */ + public com.google.protobuf.ByteString + getRetBytes(int index) { + return ret_.getByteString(index); + } + + public static final int OP_FIELD_NUMBER = 2; + private volatile java.lang.Object op_; + /** + *
+     * The op/function name.
+     * 
+ * + * optional string op = 2; + */ + public java.lang.String getOp() { + java.lang.Object ref = op_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + op_ = s; + return s; + } + } + /** + *
+     * The op/function name.
+     * 
+ * + * optional string op = 2; + */ + public com.google.protobuf.ByteString + getOpBytes() { + java.lang.Object ref = op_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + op_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ARG_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList arg_; + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + public com.google.protobuf.ProtocolStringList + getArgList() { + return arg_; + } + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + public int getArgCount() { + return arg_.size(); + } + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + public java.lang.String getArg(int index) { + return arg_.get(index); + } + /** + *
+     * Arguments passed to this func/op.
+     * arg[i] must be either one of
+     * function.signature.input_args[*].name or one of
+     * function.node[*].ret[*].
+     * REQUIRES: arg.size == func/op def's number of input args.
+     * 
+ * + * repeated string arg = 3; + */ + public com.google.protobuf.ByteString + getArgBytes(int index) { + return arg_.getByteString(index); + } + + public static final int DEP_FIELD_NUMBER = 4; + private com.google.protobuf.LazyStringList dep_; + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + public com.google.protobuf.ProtocolStringList + getDepList() { + return dep_; + } + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + public int getDepCount() { + return dep_.size(); + } + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + public java.lang.String getDep(int index) { + return dep_.get(index); + } + /** + *
+     * Control dependencies.
+     * dep[i] must be one of function.node[*].ret[*] or one of
+     * function.signature.input_args[*].name.
+     * 
+ * + * repeated string dep = 4; + */ + public com.google.protobuf.ByteString + getDepBytes(int index) { + return dep_.getByteString(index); + } + + public static final int ATTR_FIELD_NUMBER = 5; + private static final class AttrDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.tensorflow.framework.AttrValue> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_Node_AttrEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.tensorflow.framework.AttrValue.getDefaultInstance()); + } + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Attrs.
+     * 'attr' maps names defined by 'func's attr defs to attr values.
+     * attr values may have placeholders which are substituted
+     * recursively by concrete values when this node is instantiated.
+     * These placeholders must name an attr listed in the FunctionDef's
+     * signature.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < ret_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, ret_.getRaw(i)); + } + if (!getOpBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, op_); + } + for (int i = 0; i < arg_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, arg_.getRaw(i)); + } + for (int i = 0; i < dep_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, dep_.getRaw(i)); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetAttr(), + AttrDefaultEntryHolder.defaultEntry, + 5); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < ret_.size(); i++) { + dataSize += computeStringSizeNoTag(ret_.getRaw(i)); + } + size += dataSize; + size += 1 * getRetList().size(); + } + if (!getOpBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, op_); + } + { + int dataSize = 0; + for (int i = 0; i < arg_.size(); i++) { + dataSize += computeStringSizeNoTag(arg_.getRaw(i)); + } + size += dataSize; + size += 1 * getArgList().size(); + } + { + int dataSize = 0; + for (int i = 
0; i < dep_.size(); i++) { + dataSize += computeStringSizeNoTag(dep_.getRaw(i)); + } + size += dataSize; + size += 1 * getDepList().size(); + } + for (java.util.Map.Entry entry + : internalGetAttr().getMap().entrySet()) { + com.google.protobuf.MapEntry + attr__ = AttrDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, attr__); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.FunctionDef.Node)) { + return super.equals(obj); + } + org.tensorflow.framework.FunctionDef.Node other = (org.tensorflow.framework.FunctionDef.Node) obj; + + boolean result = true; + result = result && getRetList() + .equals(other.getRetList()); + result = result && getOp() + .equals(other.getOp()); + result = result && getArgList() + .equals(other.getArgList()); + result = result && getDepList() + .equals(other.getDepList()); + result = result && internalGetAttr().equals( + other.internalGetAttr()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRetCount() > 0) { + hash = (37 * hash) + RET_FIELD_NUMBER; + hash = (53 * hash) + getRetList().hashCode(); + } + hash = (37 * hash) + OP_FIELD_NUMBER; + hash = (53 * hash) + getOp().hashCode(); + if (getArgCount() > 0) { + hash = (37 * hash) + ARG_FIELD_NUMBER; + hash = (53 * hash) + getArgList().hashCode(); + } + if (getDepCount() > 0) { + hash = (37 * hash) + DEP_FIELD_NUMBER; + hash = (53 * hash) + getDepList().hashCode(); + } + if (!internalGetAttr().getMap().isEmpty()) { + hash = (37 * hash) + ATTR_FIELD_NUMBER; + hash = (53 * hash) 
+ internalGetAttr().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.FunctionDef.Node parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef.Node parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDef.Node parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDef.Node parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.FunctionDef.Node prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * A node is a multi-value assignment:
+     *   (ret[0], ret[1], ...) = func(arg[0], arg[1], ...)
+     * By convention, "func" is resolved by consulting with a user-defined
+     * library first. If not resolved, "func" is assumed to be a builtin op.
+     * 
+ * + * Protobuf type {@code tensorflow.FunctionDef.Node} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.FunctionDef.Node) + org.tensorflow.framework.FunctionDef.NodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_Node_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 5: + return internalGetAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 5: + return internalGetMutableAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_Node_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.FunctionDef.Node.class, org.tensorflow.framework.FunctionDef.Node.Builder.class); + } + + // Construct using org.tensorflow.framework.FunctionDef.Node.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + ret_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + op_ = ""; + + arg_ = 
com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + dep_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + internalGetMutableAttr().clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_Node_descriptor; + } + + public org.tensorflow.framework.FunctionDef.Node getDefaultInstanceForType() { + return org.tensorflow.framework.FunctionDef.Node.getDefaultInstance(); + } + + public org.tensorflow.framework.FunctionDef.Node build() { + org.tensorflow.framework.FunctionDef.Node result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.FunctionDef.Node buildPartial() { + org.tensorflow.framework.FunctionDef.Node result = new org.tensorflow.framework.FunctionDef.Node(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + ret_ = ret_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.ret_ = ret_; + result.op_ = op_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + arg_ = arg_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.arg_ = arg_; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + dep_ = dep_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.dep_ = dep_; + result.attr_ = internalGetAttr(); + result.attr_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + 
com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.FunctionDef.Node) { + return mergeFrom((org.tensorflow.framework.FunctionDef.Node)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.FunctionDef.Node other) { + if (other == org.tensorflow.framework.FunctionDef.Node.getDefaultInstance()) return this; + if (!other.ret_.isEmpty()) { + if (ret_.isEmpty()) { + ret_ = other.ret_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRetIsMutable(); + ret_.addAll(other.ret_); + } + onChanged(); + } + if (!other.getOp().isEmpty()) { + op_ = other.op_; + onChanged(); + } + if (!other.arg_.isEmpty()) { + if (arg_.isEmpty()) { + arg_ = other.arg_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureArgIsMutable(); + arg_.addAll(other.arg_); + } + onChanged(); + } + if (!other.dep_.isEmpty()) { + if (dep_.isEmpty()) { + dep_ = other.dep_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureDepIsMutable(); + dep_.addAll(other.dep_); + } + onChanged(); + } + internalGetMutableAttr().mergeFrom( + other.internalGetAttr()); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.FunctionDef.Node parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.FunctionDef.Node) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringList ret_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureRetIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + ret_ = new com.google.protobuf.LazyStringArrayList(ret_); + bitField0_ |= 0x00000001; + } + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public com.google.protobuf.ProtocolStringList + getRetList() { + return ret_.getUnmodifiableView(); + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public int getRetCount() { + return ret_.size(); + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public java.lang.String getRet(int index) { + return ret_.get(index); + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public com.google.protobuf.ByteString + getRetBytes(int index) { + return ret_.getByteString(index); + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public Builder setRet( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRetIsMutable(); + ret_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public Builder addRet( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRetIsMutable(); + ret_.add(value); + onChanged(); + return this; + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public Builder addAllRet( + java.lang.Iterable values) { + ensureRetIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, ret_); + onChanged(); + return this; + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public Builder clearRet() { + ret_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * This node produces multiple outputs. They are named ret[0],
+       * ret[1], ..., etc.
+       * REQUIRES: function.node.ret[*] are unique across all nodes.
+       * REQUIRES: ret.size == func/op def's number of output args.
+       * 
+ * + * repeated string ret = 1; + */ + public Builder addRetBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureRetIsMutable(); + ret_.add(value); + onChanged(); + return this; + } + + private java.lang.Object op_ = ""; + /** + *
+       * The op/function name.
+       * 
+ * + * optional string op = 2; + */ + public java.lang.String getOp() { + java.lang.Object ref = op_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + op_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The op/function name.
+       * 
+ * + * optional string op = 2; + */ + public com.google.protobuf.ByteString + getOpBytes() { + java.lang.Object ref = op_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + op_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The op/function name.
+       * 
+ * + * optional string op = 2; + */ + public Builder setOp( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + op_ = value; + onChanged(); + return this; + } + /** + *
+       * The op/function name.
+       * 
+ * + * optional string op = 2; + */ + public Builder clearOp() { + + op_ = getDefaultInstance().getOp(); + onChanged(); + return this; + } + /** + *
+       * The op/function name.
+       * 
+ * + * optional string op = 2; + */ + public Builder setOpBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + op_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList arg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureArgIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + arg_ = new com.google.protobuf.LazyStringArrayList(arg_); + bitField0_ |= 0x00000004; + } + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public com.google.protobuf.ProtocolStringList + getArgList() { + return arg_.getUnmodifiableView(); + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public int getArgCount() { + return arg_.size(); + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public java.lang.String getArg(int index) { + return arg_.get(index); + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public com.google.protobuf.ByteString + getArgBytes(int index) { + return arg_.getByteString(index); + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public Builder setArg( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgIsMutable(); + arg_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public Builder addArg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgIsMutable(); + arg_.add(value); + onChanged(); + return this; + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public Builder addAllArg( + java.lang.Iterable values) { + ensureArgIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, arg_); + onChanged(); + return this; + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public Builder clearArg() { + arg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * Arguments passed to this func/op.
+       * arg[i] must be either one of
+       * function.signature.input_args[*].name or one of
+       * function.node[*].ret[*].
+       * REQUIRES: arg.size == func/op def's number of input args.
+       * 
+ * + * repeated string arg = 3; + */ + public Builder addArgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureArgIsMutable(); + arg_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList dep_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureDepIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + dep_ = new com.google.protobuf.LazyStringArrayList(dep_); + bitField0_ |= 0x00000008; + } + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public com.google.protobuf.ProtocolStringList + getDepList() { + return dep_.getUnmodifiableView(); + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public int getDepCount() { + return dep_.size(); + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public java.lang.String getDep(int index) { + return dep_.get(index); + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public com.google.protobuf.ByteString + getDepBytes(int index) { + return dep_.getByteString(index); + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public Builder setDep( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDepIsMutable(); + dep_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public Builder addDep( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureDepIsMutable(); + dep_.add(value); + onChanged(); + return this; + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public Builder addAllDep( + java.lang.Iterable values) { + ensureDepIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, dep_); + onChanged(); + return this; + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public Builder clearDep() { + dep_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+       * Control dependencies.
+       * dep[i] must be one of function.node[*].ret[*] or one of
+       * function.signature.input_args[*].name.
+       * 
+ * + * repeated string dep = 4; + */ + public Builder addDepBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureDepIsMutable(); + dep_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + private com.google.protobuf.MapField + internalGetMutableAttr() { + onChanged();; + if (attr_ == null) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + } + if (!attr_.isMutable()) { + attr_ = attr_.copy(); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearAttr() { + getMutableAttr().clear(); + return this; + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public Builder removeAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableAttr() { + return internalGetMutableAttr().getMutableMap(); + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + public Builder putAttr( + java.lang.String key, + org.tensorflow.framework.AttrValue value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().put(key, value); + return this; + } + /** + *
+       * Attrs.
+       * 'attr' maps names defined by 'func's attr defs to attr values.
+       * attr values may have placeholders which are substituted
+       * recursively by concrete values when this node is instantiated.
+       * These placeholders must name an attr listed in the FunctionDef's
+       * signature.
+       * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public Builder putAllAttr( + java.util.Map values) { + getMutableAttr().putAll(values); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.FunctionDef.Node) + } + + // @@protoc_insertion_point(class_scope:tensorflow.FunctionDef.Node) + private static final org.tensorflow.framework.FunctionDef.Node DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.FunctionDef.Node(); + } + + public static org.tensorflow.framework.FunctionDef.Node getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Node parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Node(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.FunctionDef.Node getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int bitField0_; + public static final int SIGNATURE_FIELD_NUMBER = 1; + private org.tensorflow.framework.OpDef signature_; + /** + *
+   * The definition of the function's name, arguments, return values,
+   * attrs etc.
+   * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public boolean hasSignature() { + return signature_ != null; + } + /** + *
+   * The definition of the function's name, arguments, return values,
+   * attrs etc.
+   * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public org.tensorflow.framework.OpDef getSignature() { + return signature_ == null ? org.tensorflow.framework.OpDef.getDefaultInstance() : signature_; + } + /** + *
+   * The definition of the function's name, arguments, return values,
+   * attrs etc.
+   * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public org.tensorflow.framework.OpDefOrBuilder getSignatureOrBuilder() { + return getSignature(); + } + + public static final int ATTR_FIELD_NUMBER = 5; + private static final class AttrDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.tensorflow.framework.AttrValue> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_AttrEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.tensorflow.framework.AttrValue.getDefaultInstance()); + } + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int NODE_FIELD_NUMBER = 2; + private java.util.List node_; + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public java.util.List getNodeList() { + return node_; + } + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public java.util.List + getNodeOrBuilderList() { + return node_; + } + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public int getNodeCount() { + return node_.size(); + } + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.Node getNode(int index) { + return node_.get(index); + } + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.NodeOrBuilder getNodeOrBuilder( + int index) { + return node_.get(index); + } + + public static final int NODE_DEF_FIELD_NUMBER = 3; + private java.util.List nodeDef_; + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public java.util.List getNodeDefList() { + return nodeDef_; + } + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public java.util.List + getNodeDefOrBuilderList() { + return nodeDef_; + } + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public int getNodeDefCount() { + return nodeDef_.size(); + } + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDef getNodeDef(int index) { + return nodeDef_.get(index); + } + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDefOrBuilder getNodeDefOrBuilder( + int index) { + return nodeDef_.get(index); + } + + public static final int RET_FIELD_NUMBER = 4; + private static final class RetDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_RetEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> ret_; + private com.google.protobuf.MapField + internalGetRet() { + if (ret_ == null) { + return com.google.protobuf.MapField.emptyMapField( + RetDefaultEntryHolder.defaultEntry); + } + return ret_; + } + + public int getRetCount() { + return internalGetRet().getMap().size(); + } + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + + public boolean containsRet( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetRet().getMap().containsKey(key); + } + /** + * Use {@link #getRetMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getRet() { + return getRetMap(); + } + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + + public java.util.Map getRetMap() { + return internalGetRet().getMap(); + } + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + + public java.lang.String getRetOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetRet().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + + public java.lang.String getRetOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetRet().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (signature_ != null) { + output.writeMessage(1, getSignature()); + } + for (int i = 0; i < node_.size(); i++) { + output.writeMessage(2, node_.get(i)); + } + for (int i = 0; i < nodeDef_.size(); i++) { + output.writeMessage(3, nodeDef_.get(i)); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetRet(), + RetDefaultEntryHolder.defaultEntry, + 4); + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetAttr(), + AttrDefaultEntryHolder.defaultEntry, + 5); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (signature_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getSignature()); + } + for (int i = 0; i < node_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, node_.get(i)); + } + for (int i = 0; i < nodeDef_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, nodeDef_.get(i)); + } + for (java.util.Map.Entry entry + : internalGetRet().getMap().entrySet()) { + com.google.protobuf.MapEntry + ret__ = RetDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size 
+= com.google.protobuf.CodedOutputStream + .computeMessageSize(4, ret__); + } + for (java.util.Map.Entry entry + : internalGetAttr().getMap().entrySet()) { + com.google.protobuf.MapEntry + attr__ = AttrDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, attr__); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.FunctionDef)) { + return super.equals(obj); + } + org.tensorflow.framework.FunctionDef other = (org.tensorflow.framework.FunctionDef) obj; + + boolean result = true; + result = result && (hasSignature() == other.hasSignature()); + if (hasSignature()) { + result = result && getSignature() + .equals(other.getSignature()); + } + result = result && internalGetAttr().equals( + other.internalGetAttr()); + result = result && getNodeList() + .equals(other.getNodeList()); + result = result && getNodeDefList() + .equals(other.getNodeDefList()); + result = result && internalGetRet().equals( + other.internalGetRet()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSignature()) { + hash = (37 * hash) + SIGNATURE_FIELD_NUMBER; + hash = (53 * hash) + getSignature().hashCode(); + } + if (!internalGetAttr().getMap().isEmpty()) { + hash = (37 * hash) + ATTR_FIELD_NUMBER; + hash = (53 * hash) + internalGetAttr().hashCode(); + } + if (getNodeCount() > 0) { + hash = (37 * hash) + NODE_FIELD_NUMBER; + hash = (53 * hash) + getNodeList().hashCode(); + } + if (getNodeDefCount() > 0) { + hash = (37 * hash) + NODE_DEF_FIELD_NUMBER; + hash = (53 * hash) + 
getNodeDefList().hashCode(); + } + if (!internalGetRet().getMap().isEmpty()) { + hash = (37 * hash) + RET_FIELD_NUMBER; + hash = (53 * hash) + internalGetRet().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.FunctionDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.FunctionDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.FunctionDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDef 
parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.FunctionDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A function can be instantiated when the runtime can bind every attr
+   * with a value. When a GraphDef has a call to a function, it must
+   * have binding for every attr defined in the signature.
+   * TODO(zhifengc):
+   *   * device spec, etc.
+   * 
+ * + * Protobuf type {@code tensorflow.FunctionDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.FunctionDef) + org.tensorflow.framework.FunctionDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 5: + return internalGetAttr(); + case 4: + return internalGetRet(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 5: + return internalGetMutableAttr(); + case 4: + return internalGetMutableRet(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.FunctionDef.class, org.tensorflow.framework.FunctionDef.Builder.class); + } + + // Construct using org.tensorflow.framework.FunctionDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getNodeFieldBuilder(); + getNodeDefFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (signatureBuilder_ == null) { + 
signature_ = null; + } else { + signature_ = null; + signatureBuilder_ = null; + } + internalGetMutableAttr().clear(); + if (nodeBuilder_ == null) { + node_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + nodeBuilder_.clear(); + } + if (nodeDefBuilder_ == null) { + nodeDef_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + nodeDefBuilder_.clear(); + } + internalGetMutableRet().clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDef_descriptor; + } + + public org.tensorflow.framework.FunctionDef getDefaultInstanceForType() { + return org.tensorflow.framework.FunctionDef.getDefaultInstance(); + } + + public org.tensorflow.framework.FunctionDef build() { + org.tensorflow.framework.FunctionDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.FunctionDef buildPartial() { + org.tensorflow.framework.FunctionDef result = new org.tensorflow.framework.FunctionDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (signatureBuilder_ == null) { + result.signature_ = signature_; + } else { + result.signature_ = signatureBuilder_.build(); + } + result.attr_ = internalGetAttr(); + result.attr_.makeImmutable(); + if (nodeBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + node_ = java.util.Collections.unmodifiableList(node_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.node_ = node_; + } else { + result.node_ = nodeBuilder_.build(); + } + if (nodeDefBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + nodeDef_ = java.util.Collections.unmodifiableList(nodeDef_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.nodeDef_ = nodeDef_; + } else { + result.nodeDef_ 
= nodeDefBuilder_.build(); + } + result.ret_ = internalGetRet(); + result.ret_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.FunctionDef) { + return mergeFrom((org.tensorflow.framework.FunctionDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.FunctionDef other) { + if (other == org.tensorflow.framework.FunctionDef.getDefaultInstance()) return this; + if (other.hasSignature()) { + mergeSignature(other.getSignature()); + } + internalGetMutableAttr().mergeFrom( + other.internalGetAttr()); + if (nodeBuilder_ == null) { + if (!other.node_.isEmpty()) { + if (node_.isEmpty()) { + node_ = other.node_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureNodeIsMutable(); + node_.addAll(other.node_); + } + onChanged(); + } + } else { + if (!other.node_.isEmpty()) { + if (nodeBuilder_.isEmpty()) { + nodeBuilder_.dispose(); + nodeBuilder_ = null; + node_ = other.node_; + bitField0_ = (bitField0_ & 
~0x00000004); + nodeBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getNodeFieldBuilder() : null; + } else { + nodeBuilder_.addAllMessages(other.node_); + } + } + } + if (nodeDefBuilder_ == null) { + if (!other.nodeDef_.isEmpty()) { + if (nodeDef_.isEmpty()) { + nodeDef_ = other.nodeDef_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureNodeDefIsMutable(); + nodeDef_.addAll(other.nodeDef_); + } + onChanged(); + } + } else { + if (!other.nodeDef_.isEmpty()) { + if (nodeDefBuilder_.isEmpty()) { + nodeDefBuilder_.dispose(); + nodeDefBuilder_ = null; + nodeDef_ = other.nodeDef_; + bitField0_ = (bitField0_ & ~0x00000008); + nodeDefBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getNodeDefFieldBuilder() : null; + } else { + nodeDefBuilder_.addAllMessages(other.nodeDef_); + } + } + } + internalGetMutableRet().mergeFrom( + other.internalGetRet()); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.FunctionDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.FunctionDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private org.tensorflow.framework.OpDef signature_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OpDef, org.tensorflow.framework.OpDef.Builder, org.tensorflow.framework.OpDefOrBuilder> signatureBuilder_; + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public boolean hasSignature() { + return signatureBuilder_ != null || signature_ != null; + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public org.tensorflow.framework.OpDef getSignature() { + if (signatureBuilder_ == null) { + return signature_ == null ? org.tensorflow.framework.OpDef.getDefaultInstance() : signature_; + } else { + return signatureBuilder_.getMessage(); + } + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public Builder setSignature(org.tensorflow.framework.OpDef value) { + if (signatureBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + signature_ = value; + onChanged(); + } else { + signatureBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public Builder setSignature( + org.tensorflow.framework.OpDef.Builder builderForValue) { + if (signatureBuilder_ == null) { + signature_ = builderForValue.build(); + onChanged(); + } else { + signatureBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public Builder mergeSignature(org.tensorflow.framework.OpDef value) { + if (signatureBuilder_ == null) { + if (signature_ != null) { + signature_ = + org.tensorflow.framework.OpDef.newBuilder(signature_).mergeFrom(value).buildPartial(); + } else { + signature_ = value; + } + onChanged(); + } else { + signatureBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public Builder clearSignature() { + if (signatureBuilder_ == null) { + signature_ = null; + onChanged(); + } else { + signature_ = null; + signatureBuilder_ = null; + } + + return this; + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public org.tensorflow.framework.OpDef.Builder getSignatureBuilder() { + + onChanged(); + return getSignatureFieldBuilder().getBuilder(); + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + public org.tensorflow.framework.OpDefOrBuilder getSignatureOrBuilder() { + if (signatureBuilder_ != null) { + return signatureBuilder_.getMessageOrBuilder(); + } else { + return signature_ == null ? + org.tensorflow.framework.OpDef.getDefaultInstance() : signature_; + } + } + /** + *
+     * The definition of the function's name, arguments, return values,
+     * attrs etc.
+     * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OpDef, org.tensorflow.framework.OpDef.Builder, org.tensorflow.framework.OpDefOrBuilder> + getSignatureFieldBuilder() { + if (signatureBuilder_ == null) { + signatureBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OpDef, org.tensorflow.framework.OpDef.Builder, org.tensorflow.framework.OpDefOrBuilder>( + getSignature(), + getParentForChildren(), + isClean()); + signature_ = null; + } + return signatureBuilder_; + } + + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + private com.google.protobuf.MapField + internalGetMutableAttr() { + onChanged();; + if (attr_ == null) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + } + if (!attr_.isMutable()) { + attr_ = attr_.copy(); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearAttr() { + getMutableAttr().clear(); + return this; + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public Builder removeAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableAttr() { + return internalGetMutableAttr().getMutableMap(); + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + public Builder putAttr( + java.lang.String key, + org.tensorflow.framework.AttrValue value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().put(key, value); + return this; + } + /** + *
+     * Attributes specific to this function definition.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public Builder putAllAttr( + java.util.Map values) { + getMutableAttr().putAll(values); + return this; + } + + private java.util.List node_ = + java.util.Collections.emptyList(); + private void ensureNodeIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + node_ = new java.util.ArrayList(node_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.FunctionDef.Node, org.tensorflow.framework.FunctionDef.Node.Builder, org.tensorflow.framework.FunctionDef.NodeOrBuilder> nodeBuilder_; + + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public java.util.List getNodeList() { + if (nodeBuilder_ == null) { + return java.util.Collections.unmodifiableList(node_); + } else { + return nodeBuilder_.getMessageList(); + } + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public int getNodeCount() { + if (nodeBuilder_ == null) { + return node_.size(); + } else { + return nodeBuilder_.getCount(); + } + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.Node getNode(int index) { + if (nodeBuilder_ == null) { + return node_.get(index); + } else { + return nodeBuilder_.getMessage(index); + } + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder setNode( + int index, org.tensorflow.framework.FunctionDef.Node value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.set(index, value); + onChanged(); + } else { + nodeBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder setNode( + int index, org.tensorflow.framework.FunctionDef.Node.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.set(index, builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder addNode(org.tensorflow.framework.FunctionDef.Node value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.add(value); + onChanged(); + } else { + nodeBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder addNode( + int index, org.tensorflow.framework.FunctionDef.Node value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.add(index, value); + onChanged(); + } else { + nodeBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder addNode( + org.tensorflow.framework.FunctionDef.Node.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.add(builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder addNode( + int index, org.tensorflow.framework.FunctionDef.Node.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.add(index, builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder addAllNode( + java.lang.Iterable values) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, node_); + onChanged(); + } else { + nodeBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder clearNode() { + if (nodeBuilder_ == null) { + node_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + nodeBuilder_.clear(); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public Builder removeNode(int index) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.remove(index); + onChanged(); + } else { + nodeBuilder_.remove(index); + } + return this; + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.Node.Builder getNodeBuilder( + int index) { + return getNodeFieldBuilder().getBuilder(index); + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.NodeOrBuilder getNodeOrBuilder( + int index) { + if (nodeBuilder_ == null) { + return node_.get(index); } else { + return nodeBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public java.util.List + getNodeOrBuilderList() { + if (nodeBuilder_ != null) { + return nodeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(node_); + } + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.Node.Builder addNodeBuilder() { + return getNodeFieldBuilder().addBuilder( + org.tensorflow.framework.FunctionDef.Node.getDefaultInstance()); + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public org.tensorflow.framework.FunctionDef.Node.Builder addNodeBuilder( + int index) { + return getNodeFieldBuilder().addBuilder( + index, org.tensorflow.framework.FunctionDef.Node.getDefaultInstance()); + } + /** + *
+     * The body of the function.
+     * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + public java.util.List + getNodeBuilderList() { + return getNodeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.FunctionDef.Node, org.tensorflow.framework.FunctionDef.Node.Builder, org.tensorflow.framework.FunctionDef.NodeOrBuilder> + getNodeFieldBuilder() { + if (nodeBuilder_ == null) { + nodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.FunctionDef.Node, org.tensorflow.framework.FunctionDef.Node.Builder, org.tensorflow.framework.FunctionDef.NodeOrBuilder>( + node_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + node_ = null; + } + return nodeBuilder_; + } + + private java.util.List nodeDef_ = + java.util.Collections.emptyList(); + private void ensureNodeDefIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + nodeDef_ = new java.util.ArrayList(nodeDef_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeDef, org.tensorflow.framework.NodeDef.Builder, org.tensorflow.framework.NodeDefOrBuilder> nodeDefBuilder_; + + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public java.util.List getNodeDefList() { + if (nodeDefBuilder_ == null) { + return java.util.Collections.unmodifiableList(nodeDef_); + } else { + return nodeDefBuilder_.getMessageList(); + } + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public int getNodeDefCount() { + if (nodeDefBuilder_ == null) { + return nodeDef_.size(); + } else { + return nodeDefBuilder_.getCount(); + } + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDef getNodeDef(int index) { + if (nodeDefBuilder_ == null) { + return nodeDef_.get(index); + } else { + return nodeDefBuilder_.getMessage(index); + } + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder setNodeDef( + int index, org.tensorflow.framework.NodeDef value) { + if (nodeDefBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeDefIsMutable(); + nodeDef_.set(index, value); + onChanged(); + } else { + nodeDefBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder setNodeDef( + int index, org.tensorflow.framework.NodeDef.Builder builderForValue) { + if (nodeDefBuilder_ == null) { + ensureNodeDefIsMutable(); + nodeDef_.set(index, builderForValue.build()); + onChanged(); + } else { + nodeDefBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder addNodeDef(org.tensorflow.framework.NodeDef value) { + if (nodeDefBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeDefIsMutable(); + nodeDef_.add(value); + onChanged(); + } else { + nodeDefBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder addNodeDef( + int index, org.tensorflow.framework.NodeDef value) { + if (nodeDefBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeDefIsMutable(); + nodeDef_.add(index, value); + onChanged(); + } else { + nodeDefBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder addNodeDef( + org.tensorflow.framework.NodeDef.Builder builderForValue) { + if (nodeDefBuilder_ == null) { + ensureNodeDefIsMutable(); + nodeDef_.add(builderForValue.build()); + onChanged(); + } else { + nodeDefBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder addNodeDef( + int index, org.tensorflow.framework.NodeDef.Builder builderForValue) { + if (nodeDefBuilder_ == null) { + ensureNodeDefIsMutable(); + nodeDef_.add(index, builderForValue.build()); + onChanged(); + } else { + nodeDefBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder addAllNodeDef( + java.lang.Iterable values) { + if (nodeDefBuilder_ == null) { + ensureNodeDefIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, nodeDef_); + onChanged(); + } else { + nodeDefBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder clearNodeDef() { + if (nodeDefBuilder_ == null) { + nodeDef_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + nodeDefBuilder_.clear(); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public Builder removeNodeDef(int index) { + if (nodeDefBuilder_ == null) { + ensureNodeDefIsMutable(); + nodeDef_.remove(index); + onChanged(); + } else { + nodeDefBuilder_.remove(index); + } + return this; + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDef.Builder getNodeDefBuilder( + int index) { + return getNodeDefFieldBuilder().getBuilder(index); + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDefOrBuilder getNodeDefOrBuilder( + int index) { + if (nodeDefBuilder_ == null) { + return nodeDef_.get(index); } else { + return nodeDefBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public java.util.List + getNodeDefOrBuilderList() { + if (nodeDefBuilder_ != null) { + return nodeDefBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(nodeDef_); + } + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDef.Builder addNodeDefBuilder() { + return getNodeDefFieldBuilder().addBuilder( + org.tensorflow.framework.NodeDef.getDefaultInstance()); + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public org.tensorflow.framework.NodeDef.Builder addNodeDefBuilder( + int index) { + return getNodeDefFieldBuilder().addBuilder( + index, org.tensorflow.framework.NodeDef.getDefaultInstance()); + } + /** + *
+     * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+     * may have values of type `placeholder` and the `input` field uses
+     * the "output" format above.
+     * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + public java.util.List + getNodeDefBuilderList() { + return getNodeDefFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeDef, org.tensorflow.framework.NodeDef.Builder, org.tensorflow.framework.NodeDefOrBuilder> + getNodeDefFieldBuilder() { + if (nodeDefBuilder_ == null) { + nodeDefBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeDef, org.tensorflow.framework.NodeDef.Builder, org.tensorflow.framework.NodeDefOrBuilder>( + nodeDef_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + nodeDef_ = null; + } + return nodeDefBuilder_; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> ret_; + private com.google.protobuf.MapField + internalGetRet() { + if (ret_ == null) { + return com.google.protobuf.MapField.emptyMapField( + RetDefaultEntryHolder.defaultEntry); + } + return ret_; + } + private com.google.protobuf.MapField + internalGetMutableRet() { + onChanged();; + if (ret_ == null) { + ret_ = com.google.protobuf.MapField.newMapField( + RetDefaultEntryHolder.defaultEntry); + } + if (!ret_.isMutable()) { + ret_ = ret_.copy(); + } + return ret_; + } + + public int getRetCount() { + return internalGetRet().getMap().size(); + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + + public boolean containsRet( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetRet().getMap().containsKey(key); + } + /** + * Use {@link #getRetMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getRet() { + return getRetMap(); + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + + public java.util.Map getRetMap() { + return internalGetRet().getMap(); + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + + public java.lang.String getRetOrDefault( + java.lang.String key, + java.lang.String defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetRet().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + + public java.lang.String getRetOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetRet().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearRet() { + getMutableRet().clear(); + return this; + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + + public Builder removeRet( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableRet().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableRet() { + return internalGetMutableRet().getMutableMap(); + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + public Builder putRet( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableRet().put(key, value); + return this; + } + /** + *
+     * A mapping from the output arg names from `signature` to the
+     * outputs from `node_def` that should be returned by the function.
+     * 
+ * + * map<string, string> ret = 4; + */ + + public Builder putAllRet( + java.util.Map values) { + getMutableRet().putAll(values); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.FunctionDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.FunctionDef) + private static final org.tensorflow.framework.FunctionDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.FunctionDef(); + } + + public static org.tensorflow.framework.FunctionDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FunctionDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FunctionDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.FunctionDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibrary.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibrary.java new file mode 100644 index 0000000000000..286bba8dd3ec9 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibrary.java @@ -0,0 +1,1063 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +/** + *
+ * A library is a set of named functions.
+ * 
+ * + * Protobuf type {@code tensorflow.FunctionDefLibrary} + */ +public final class FunctionDefLibrary extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.FunctionDefLibrary) + FunctionDefLibraryOrBuilder { + // Use FunctionDefLibrary.newBuilder() to construct. + private FunctionDefLibrary(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FunctionDefLibrary() { + function_ = java.util.Collections.emptyList(); + gradient_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private FunctionDefLibrary( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + function_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + function_.add( + input.readMessage(org.tensorflow.framework.FunctionDef.parser(), extensionRegistry)); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + gradient_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + gradient_.add( + input.readMessage(org.tensorflow.framework.GradientDef.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if 
(((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + function_ = java.util.Collections.unmodifiableList(function_); + } + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + gradient_ = java.util.Collections.unmodifiableList(gradient_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDefLibrary_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDefLibrary_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.FunctionDefLibrary.class, org.tensorflow.framework.FunctionDefLibrary.Builder.class); + } + + public static final int FUNCTION_FIELD_NUMBER = 1; + private java.util.List function_; + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public java.util.List getFunctionList() { + return function_; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public java.util.List + getFunctionOrBuilderList() { + return function_; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public int getFunctionCount() { + return function_.size(); + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDef getFunction(int index) { + return function_.get(index); + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDefOrBuilder getFunctionOrBuilder( + int index) { + return function_.get(index); + } + + public static final int GRADIENT_FIELD_NUMBER = 2; + private java.util.List gradient_; + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public java.util.List getGradientList() { + return gradient_; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public 
java.util.List + getGradientOrBuilderList() { + return gradient_; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public int getGradientCount() { + return gradient_.size(); + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public org.tensorflow.framework.GradientDef getGradient(int index) { + return gradient_.get(index); + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public org.tensorflow.framework.GradientDefOrBuilder getGradientOrBuilder( + int index) { + return gradient_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < function_.size(); i++) { + output.writeMessage(1, function_.get(i)); + } + for (int i = 0; i < gradient_.size(); i++) { + output.writeMessage(2, gradient_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < function_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, function_.get(i)); + } + for (int i = 0; i < gradient_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, gradient_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.FunctionDefLibrary)) { + return super.equals(obj); + } + org.tensorflow.framework.FunctionDefLibrary other = (org.tensorflow.framework.FunctionDefLibrary) obj; + + boolean result = true; + result = result && getFunctionList() + 
.equals(other.getFunctionList()); + result = result && getGradientList() + .equals(other.getGradientList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFunctionCount() > 0) { + hash = (37 * hash) + FUNCTION_FIELD_NUMBER; + hash = (53 * hash) + getFunctionList().hashCode(); + } + if (getGradientCount() > 0) { + hash = (37 * hash) + GRADIENT_FIELD_NUMBER; + hash = (53 * hash) + getGradientList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.FunctionDefLibrary parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDefLibrary parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDefLibrary parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.FunctionDefLibrary parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.FunctionDefLibrary prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A library is a set of named functions.
+   * 
+ * + * Protobuf type {@code tensorflow.FunctionDefLibrary} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.FunctionDefLibrary) + org.tensorflow.framework.FunctionDefLibraryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDefLibrary_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDefLibrary_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.FunctionDefLibrary.class, org.tensorflow.framework.FunctionDefLibrary.Builder.class); + } + + // Construct using org.tensorflow.framework.FunctionDefLibrary.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getFunctionFieldBuilder(); + getGradientFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (functionBuilder_ == null) { + function_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + functionBuilder_.clear(); + } + if (gradientBuilder_ == null) { + gradient_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + gradientBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_FunctionDefLibrary_descriptor; + } + + public 
org.tensorflow.framework.FunctionDefLibrary getDefaultInstanceForType() { + return org.tensorflow.framework.FunctionDefLibrary.getDefaultInstance(); + } + + public org.tensorflow.framework.FunctionDefLibrary build() { + org.tensorflow.framework.FunctionDefLibrary result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.FunctionDefLibrary buildPartial() { + org.tensorflow.framework.FunctionDefLibrary result = new org.tensorflow.framework.FunctionDefLibrary(this); + int from_bitField0_ = bitField0_; + if (functionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + function_ = java.util.Collections.unmodifiableList(function_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.function_ = function_; + } else { + result.function_ = functionBuilder_.build(); + } + if (gradientBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + gradient_ = java.util.Collections.unmodifiableList(gradient_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.gradient_ = gradient_; + } else { + result.gradient_ = gradientBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.FunctionDefLibrary) { + return mergeFrom((org.tensorflow.framework.FunctionDefLibrary)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.FunctionDefLibrary other) { + if (other == org.tensorflow.framework.FunctionDefLibrary.getDefaultInstance()) return this; + if (functionBuilder_ == null) { + if (!other.function_.isEmpty()) { + if (function_.isEmpty()) { + function_ = other.function_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFunctionIsMutable(); + function_.addAll(other.function_); + } + onChanged(); + } + } else { + if (!other.function_.isEmpty()) { + if (functionBuilder_.isEmpty()) { + functionBuilder_.dispose(); + functionBuilder_ = null; + function_ = other.function_; + bitField0_ = (bitField0_ & ~0x00000001); + functionBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getFunctionFieldBuilder() : null; + } else { + functionBuilder_.addAllMessages(other.function_); + } + } + } + if (gradientBuilder_ == null) { + if (!other.gradient_.isEmpty()) { + if (gradient_.isEmpty()) { + gradient_ = other.gradient_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureGradientIsMutable(); + gradient_.addAll(other.gradient_); + } + onChanged(); + } + } else { + if (!other.gradient_.isEmpty()) { + if (gradientBuilder_.isEmpty()) { + gradientBuilder_.dispose(); + gradientBuilder_ = null; + gradient_ = other.gradient_; + bitField0_ = (bitField0_ & ~0x00000002); + gradientBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getGradientFieldBuilder() : null; + } else { + gradientBuilder_.addAllMessages(other.gradient_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.FunctionDefLibrary parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.FunctionDefLibrary) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List function_ = + java.util.Collections.emptyList(); + private void ensureFunctionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + function_ = new java.util.ArrayList(function_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.FunctionDef, org.tensorflow.framework.FunctionDef.Builder, org.tensorflow.framework.FunctionDefOrBuilder> functionBuilder_; + + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public java.util.List getFunctionList() { + if (functionBuilder_ == null) { + return java.util.Collections.unmodifiableList(function_); + } else { + return functionBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public int getFunctionCount() { + if (functionBuilder_ == null) { + return function_.size(); + } else { + return functionBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDef getFunction(int index) { + if (functionBuilder_ == null) { + return function_.get(index); + } else { + return 
functionBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder setFunction( + int index, org.tensorflow.framework.FunctionDef value) { + if (functionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFunctionIsMutable(); + function_.set(index, value); + onChanged(); + } else { + functionBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder setFunction( + int index, org.tensorflow.framework.FunctionDef.Builder builderForValue) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.set(index, builderForValue.build()); + onChanged(); + } else { + functionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder addFunction(org.tensorflow.framework.FunctionDef value) { + if (functionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFunctionIsMutable(); + function_.add(value); + onChanged(); + } else { + functionBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder addFunction( + int index, org.tensorflow.framework.FunctionDef value) { + if (functionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFunctionIsMutable(); + function_.add(index, value); + onChanged(); + } else { + functionBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder addFunction( + org.tensorflow.framework.FunctionDef.Builder builderForValue) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.add(builderForValue.build()); + onChanged(); + } else { + functionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * 
repeated .tensorflow.FunctionDef function = 1; + */ + public Builder addFunction( + int index, org.tensorflow.framework.FunctionDef.Builder builderForValue) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.add(index, builderForValue.build()); + onChanged(); + } else { + functionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder addAllFunction( + java.lang.Iterable values) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, function_); + onChanged(); + } else { + functionBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder clearFunction() { + if (functionBuilder_ == null) { + function_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + functionBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public Builder removeFunction(int index) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.remove(index); + onChanged(); + } else { + functionBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDef.Builder getFunctionBuilder( + int index) { + return getFunctionFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDefOrBuilder getFunctionOrBuilder( + int index) { + if (functionBuilder_ == null) { + return function_.get(index); } else { + return functionBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public java.util.List + getFunctionOrBuilderList() { + if (functionBuilder_ != null) { + return 
functionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(function_); + } + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDef.Builder addFunctionBuilder() { + return getFunctionFieldBuilder().addBuilder( + org.tensorflow.framework.FunctionDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public org.tensorflow.framework.FunctionDef.Builder addFunctionBuilder( + int index) { + return getFunctionFieldBuilder().addBuilder( + index, org.tensorflow.framework.FunctionDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + public java.util.List + getFunctionBuilderList() { + return getFunctionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.FunctionDef, org.tensorflow.framework.FunctionDef.Builder, org.tensorflow.framework.FunctionDefOrBuilder> + getFunctionFieldBuilder() { + if (functionBuilder_ == null) { + functionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.FunctionDef, org.tensorflow.framework.FunctionDef.Builder, org.tensorflow.framework.FunctionDefOrBuilder>( + function_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + function_ = null; + } + return functionBuilder_; + } + + private java.util.List gradient_ = + java.util.Collections.emptyList(); + private void ensureGradientIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + gradient_ = new java.util.ArrayList(gradient_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.GradientDef, org.tensorflow.framework.GradientDef.Builder, org.tensorflow.framework.GradientDefOrBuilder> gradientBuilder_; + + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public java.util.List getGradientList() { + 
if (gradientBuilder_ == null) { + return java.util.Collections.unmodifiableList(gradient_); + } else { + return gradientBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public int getGradientCount() { + if (gradientBuilder_ == null) { + return gradient_.size(); + } else { + return gradientBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public org.tensorflow.framework.GradientDef getGradient(int index) { + if (gradientBuilder_ == null) { + return gradient_.get(index); + } else { + return gradientBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder setGradient( + int index, org.tensorflow.framework.GradientDef value) { + if (gradientBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureGradientIsMutable(); + gradient_.set(index, value); + onChanged(); + } else { + gradientBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder setGradient( + int index, org.tensorflow.framework.GradientDef.Builder builderForValue) { + if (gradientBuilder_ == null) { + ensureGradientIsMutable(); + gradient_.set(index, builderForValue.build()); + onChanged(); + } else { + gradientBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder addGradient(org.tensorflow.framework.GradientDef value) { + if (gradientBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureGradientIsMutable(); + gradient_.add(value); + onChanged(); + } else { + gradientBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder addGradient( + int index, org.tensorflow.framework.GradientDef value) { + if (gradientBuilder_ == null) { + if (value == 
null) { + throw new NullPointerException(); + } + ensureGradientIsMutable(); + gradient_.add(index, value); + onChanged(); + } else { + gradientBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder addGradient( + org.tensorflow.framework.GradientDef.Builder builderForValue) { + if (gradientBuilder_ == null) { + ensureGradientIsMutable(); + gradient_.add(builderForValue.build()); + onChanged(); + } else { + gradientBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder addGradient( + int index, org.tensorflow.framework.GradientDef.Builder builderForValue) { + if (gradientBuilder_ == null) { + ensureGradientIsMutable(); + gradient_.add(index, builderForValue.build()); + onChanged(); + } else { + gradientBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder addAllGradient( + java.lang.Iterable values) { + if (gradientBuilder_ == null) { + ensureGradientIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, gradient_); + onChanged(); + } else { + gradientBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder clearGradient() { + if (gradientBuilder_ == null) { + gradient_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + gradientBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public Builder removeGradient(int index) { + if (gradientBuilder_ == null) { + ensureGradientIsMutable(); + gradient_.remove(index); + onChanged(); + } else { + gradientBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public 
org.tensorflow.framework.GradientDef.Builder getGradientBuilder( + int index) { + return getGradientFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public org.tensorflow.framework.GradientDefOrBuilder getGradientOrBuilder( + int index) { + if (gradientBuilder_ == null) { + return gradient_.get(index); } else { + return gradientBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public java.util.List + getGradientOrBuilderList() { + if (gradientBuilder_ != null) { + return gradientBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(gradient_); + } + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public org.tensorflow.framework.GradientDef.Builder addGradientBuilder() { + return getGradientFieldBuilder().addBuilder( + org.tensorflow.framework.GradientDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public org.tensorflow.framework.GradientDef.Builder addGradientBuilder( + int index) { + return getGradientFieldBuilder().addBuilder( + index, org.tensorflow.framework.GradientDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + public java.util.List + getGradientBuilderList() { + return getGradientFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.GradientDef, org.tensorflow.framework.GradientDef.Builder, org.tensorflow.framework.GradientDefOrBuilder> + getGradientFieldBuilder() { + if (gradientBuilder_ == null) { + gradientBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.GradientDef, org.tensorflow.framework.GradientDef.Builder, org.tensorflow.framework.GradientDefOrBuilder>( + gradient_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + gradient_ = null; + } + return 
gradientBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.FunctionDefLibrary) + } + + // @@protoc_insertion_point(class_scope:tensorflow.FunctionDefLibrary) + private static final org.tensorflow.framework.FunctionDefLibrary DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.FunctionDefLibrary(); + } + + public static org.tensorflow.framework.FunctionDefLibrary getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public FunctionDefLibrary parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new FunctionDefLibrary(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.FunctionDefLibrary getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibraryOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibraryOrBuilder.java new file mode 100644 index 0000000000000..137d9cf8819f1 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefLibraryOrBuilder.java @@ -0,0 +1,57 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +public interface FunctionDefLibraryOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.FunctionDefLibrary) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + java.util.List + getFunctionList(); + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + org.tensorflow.framework.FunctionDef getFunction(int index); + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + int getFunctionCount(); + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + java.util.List + getFunctionOrBuilderList(); + /** + * repeated .tensorflow.FunctionDef function = 1; + */ + org.tensorflow.framework.FunctionDefOrBuilder getFunctionOrBuilder( + int index); + + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + java.util.List + getGradientList(); + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + org.tensorflow.framework.GradientDef getGradient(int index); + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + int getGradientCount(); + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + java.util.List + getGradientOrBuilderList(); + /** + * repeated .tensorflow.GradientDef gradient = 2; + */ + org.tensorflow.framework.GradientDefOrBuilder getGradientOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefOrBuilder.java new file mode 100644 index 0000000000000..fdfb02a75893f --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionDefOrBuilder.java @@ -0,0 +1,248 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +public interface FunctionDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.FunctionDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * The definition of the function's name, arguments, return values,
+   * attrs etc.
+   * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + boolean hasSignature(); + /** + *
+   * The definition of the function's name, arguments, return values,
+   * attrs etc.
+   * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + org.tensorflow.framework.OpDef getSignature(); + /** + *
+   * The definition of the function's name, arguments, return values,
+   * attrs etc.
+   * 
+ * + * optional .tensorflow.OpDef signature = 1; + */ + org.tensorflow.framework.OpDefOrBuilder getSignatureOrBuilder(); + + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + int getAttrCount(); + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + boolean containsAttr( + java.lang.String key); + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getAttr(); + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + java.util.Map + getAttrMap(); + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue); + /** + *
+   * Attributes specific to this function definition.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key); + + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + java.util.List + getNodeList(); + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + org.tensorflow.framework.FunctionDef.Node getNode(int index); + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + int getNodeCount(); + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + java.util.List + getNodeOrBuilderList(); + /** + *
+   * The body of the function.
+   * 
+ * + * repeated .tensorflow.FunctionDef.Node node = 2; + */ + org.tensorflow.framework.FunctionDef.NodeOrBuilder getNodeOrBuilder( + int index); + + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + java.util.List + getNodeDefList(); + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + org.tensorflow.framework.NodeDef getNodeDef(int index); + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + int getNodeDefCount(); + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + java.util.List + getNodeDefOrBuilderList(); + /** + *
+   * The body of the function.  Unlike the NodeDefs in a GraphDef, attrs
+   * may have values of type `placeholder` and the `input` field uses
+   * the "output" format above.
+   * 
+ * + * repeated .tensorflow.NodeDef node_def = 3; + */ + org.tensorflow.framework.NodeDefOrBuilder getNodeDefOrBuilder( + int index); + + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + int getRetCount(); + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + boolean containsRet( + java.lang.String key); + /** + * Use {@link #getRetMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getRet(); + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + java.util.Map + getRetMap(); + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + + java.lang.String getRetOrDefault( + java.lang.String key, + java.lang.String defaultValue); + /** + *
+   * A mapping from the output arg names from `signature` to the
+   * outputs from `node_def` that should be returned by the function.
+   * 
+ * + * map<string, string> ret = 4; + */ + + java.lang.String getRetOrThrow( + java.lang.String key); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionProtos.java new file mode 100644 index 0000000000000..4bc6da6337325 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/FunctionProtos.java @@ -0,0 +1,149 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +public final class FunctionProtos { + private FunctionProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_FunctionDefLibrary_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_FunctionDefLibrary_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_FunctionDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_FunctionDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_FunctionDef_AttrEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_FunctionDef_AttrEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_FunctionDef_Node_descriptor; + static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_FunctionDef_Node_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_FunctionDef_Node_AttrEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_FunctionDef_Node_AttrEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_FunctionDef_RetEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_FunctionDef_RetEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_GradientDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_GradientDef_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n(tensorflow/core/framework/function.pro" + + "to\022\ntensorflow\032*tensorflow/core/framewor" + + "k/attr_value.proto\032(tensorflow/core/fram" + + "ework/node_def.proto\032&tensorflow/core/fr" + + "amework/op_def.proto\"j\n\022FunctionDefLibra" + + "ry\022)\n\010function\030\001 \003(\0132\027.tensorflow.Functi" + + "onDef\022)\n\010gradient\030\002 \003(\0132\027.tensorflow.Gra" + + "dientDef\"\214\004\n\013FunctionDef\022$\n\tsignature\030\001 " + + "\001(\0132\021.tensorflow.OpDef\022/\n\004attr\030\005 \003(\0132!.t" + + "ensorflow.FunctionDef.AttrEntry\022*\n\004node\030", + "\002 \003(\0132\034.tensorflow.FunctionDef.Node\022%\n\010n" + + "ode_def\030\003 \003(\0132\023.tensorflow.NodeDef\022-\n\003re" + + "t\030\004 \003(\0132 .tensorflow.FunctionDef.RetEntr" + + "y\032B\n\tAttrEntry\022\013\n\003key\030\001 
\001(\t\022$\n\005value\030\002 \001" + + "(\0132\025.tensorflow.AttrValue:\0028\001\032\263\001\n\004Node\022\013" + + "\n\003ret\030\001 \003(\t\022\n\n\002op\030\002 \001(\t\022\013\n\003arg\030\003 \003(\t\022\013\n\003" + + "dep\030\004 \003(\t\0224\n\004attr\030\005 \003(\0132&.tensorflow.Fun" + + "ctionDef.Node.AttrEntry\032B\n\tAttrEntry\022\013\n\003" + + "key\030\001 \001(\t\022$\n\005value\030\002 \001(\0132\025.tensorflow.At" + + "trValue:\0028\001\032*\n\010RetEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005", + "value\030\002 \001(\t:\0028\001\";\n\013GradientDef\022\025\n\rfuncti" + + "on_name\030\001 \001(\t\022\025\n\rgradient_func\030\002 \001(\tB/\n\030" + + "org.tensorflow.frameworkB\016FunctionProtos" + + "P\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.AttrValueProtos.getDescriptor(), + org.tensorflow.framework.NodeProto.getDescriptor(), + org.tensorflow.framework.OpDefProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_FunctionDefLibrary_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_FunctionDefLibrary_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_FunctionDefLibrary_descriptor, + new java.lang.String[] { "Function", "Gradient", }); + internal_static_tensorflow_FunctionDef_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_FunctionDef_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_FunctionDef_descriptor, + new java.lang.String[] { "Signature", "Attr", "Node", "NodeDef", "Ret", }); + internal_static_tensorflow_FunctionDef_AttrEntry_descriptor = + internal_static_tensorflow_FunctionDef_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_FunctionDef_AttrEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_FunctionDef_AttrEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_tensorflow_FunctionDef_Node_descriptor = + internal_static_tensorflow_FunctionDef_descriptor.getNestedTypes().get(1); + internal_static_tensorflow_FunctionDef_Node_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_FunctionDef_Node_descriptor, + new java.lang.String[] { "Ret", "Op", "Arg", "Dep", "Attr", }); + internal_static_tensorflow_FunctionDef_Node_AttrEntry_descriptor = + internal_static_tensorflow_FunctionDef_Node_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_FunctionDef_Node_AttrEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_FunctionDef_Node_AttrEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_tensorflow_FunctionDef_RetEntry_descriptor = + internal_static_tensorflow_FunctionDef_descriptor.getNestedTypes().get(2); + internal_static_tensorflow_FunctionDef_RetEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_FunctionDef_RetEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_tensorflow_GradientDef_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_tensorflow_GradientDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_tensorflow_GradientDef_descriptor, + new java.lang.String[] { "FunctionName", "GradientFunc", }); + org.tensorflow.framework.AttrValueProtos.getDescriptor(); + org.tensorflow.framework.NodeProto.getDescriptor(); + org.tensorflow.framework.OpDefProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptions.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptions.java new file mode 100644 index 0000000000000..538e42c6a508e --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptions.java @@ -0,0 +1,1054 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.GPUOptions} + */ +public final class GPUOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.GPUOptions) + GPUOptionsOrBuilder { + // Use GPUOptions.newBuilder() to construct. 
+ private GPUOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GPUOptions() { + perProcessGpuMemoryFraction_ = 0D; + allocatorType_ = ""; + deferredDeletionBytes_ = 0L; + allowGrowth_ = false; + visibleDeviceList_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private GPUOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 9: { + + perProcessGpuMemoryFraction_ = input.readDouble(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + allocatorType_ = s; + break; + } + case 24: { + + deferredDeletionBytes_ = input.readInt64(); + break; + } + case 32: { + + allowGrowth_ = input.readBool(); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + visibleDeviceList_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GPUOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GPUOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GPUOptions.class, org.tensorflow.framework.GPUOptions.Builder.class); + } + + public static final int PER_PROCESS_GPU_MEMORY_FRACTION_FIELD_NUMBER = 1; + private double perProcessGpuMemoryFraction_; + /** + *
+   * A value between 0 and 1 that indicates what fraction of the
+   * available GPU memory to pre-allocate for each process.  1 means
+   * to pre-allocate all of the GPU memory, 0.5 means the process
+   * allocates ~50% of the available GPU memory.
+   * 
+ * + * optional double per_process_gpu_memory_fraction = 1; + */ + public double getPerProcessGpuMemoryFraction() { + return perProcessGpuMemoryFraction_; + } + + public static final int ALLOCATOR_TYPE_FIELD_NUMBER = 2; + private volatile java.lang.Object allocatorType_; + /** + *
+   * The type of GPU allocation strategy to use.
+   * Allowed values:
+   * "": The empty string (default) uses a system-chosen default
+   *     which may change over time.
+   * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+   *        version of dlmalloc.
+   * 
+ * + * optional string allocator_type = 2; + */ + public java.lang.String getAllocatorType() { + java.lang.Object ref = allocatorType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorType_ = s; + return s; + } + } + /** + *
+   * The type of GPU allocation strategy to use.
+   * Allowed values:
+   * "": The empty string (default) uses a system-chosen default
+   *     which may change over time.
+   * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+   *        version of dlmalloc.
+   * 
+ * + * optional string allocator_type = 2; + */ + public com.google.protobuf.ByteString + getAllocatorTypeBytes() { + java.lang.Object ref = allocatorType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DEFERRED_DELETION_BYTES_FIELD_NUMBER = 3; + private long deferredDeletionBytes_; + /** + *
+   * Delay deletion of up to this many bytes to reduce the number of
+   * interactions with gpu driver code.  If 0, the system chooses
+   * a reasonable default (several MBs).
+   * 
+ * + * optional int64 deferred_deletion_bytes = 3; + */ + public long getDeferredDeletionBytes() { + return deferredDeletionBytes_; + } + + public static final int ALLOW_GROWTH_FIELD_NUMBER = 4; + private boolean allowGrowth_; + /** + *
+   * If true, the allocator does not pre-allocate the entire specified
+   * GPU memory region, instead starting small and growing as needed.
+   * 
+ * + * optional bool allow_growth = 4; + */ + public boolean getAllowGrowth() { + return allowGrowth_; + } + + public static final int VISIBLE_DEVICE_LIST_FIELD_NUMBER = 5; + private volatile java.lang.Object visibleDeviceList_; + /** + *
+   * A comma-separated list of GPU ids that determines the 'visible'
+   * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+   * can see 8 GPU devices in the process, and one wanted to map
+   * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+   * would specify this field as "5,3".  This field is similar in
+   * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+   * it applies to the visible GPU devices in the process.
+   * NOTE: The GPU driver provides the process with the visible GPUs
+   * in an order which is not guaranteed to have any correlation to
+   * the *physical* GPU id in the machine.  This field is used for
+   * remapping "visible" to "virtual", which means this operates only
+   * after the process starts.  Users are required to use vendor
+   * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+   * physical to visible device mapping prior to invoking TensorFlow.
+   * 
+ * + * optional string visible_device_list = 5; + */ + public java.lang.String getVisibleDeviceList() { + java.lang.Object ref = visibleDeviceList_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + visibleDeviceList_ = s; + return s; + } + } + /** + *
+   * A comma-separated list of GPU ids that determines the 'visible'
+   * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+   * can see 8 GPU devices in the process, and one wanted to map
+   * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+   * would specify this field as "5,3".  This field is similar in
+   * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+   * it applies to the visible GPU devices in the process.
+   * NOTE: The GPU driver provides the process with the visible GPUs
+   * in an order which is not guaranteed to have any correlation to
+   * the *physical* GPU id in the machine.  This field is used for
+   * remapping "visible" to "virtual", which means this operates only
+   * after the process starts.  Users are required to use vendor
+   * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+   * physical to visible device mapping prior to invoking TensorFlow.
+   * 
+ * + * optional string visible_device_list = 5; + */ + public com.google.protobuf.ByteString + getVisibleDeviceListBytes() { + java.lang.Object ref = visibleDeviceList_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + visibleDeviceList_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (perProcessGpuMemoryFraction_ != 0D) { + output.writeDouble(1, perProcessGpuMemoryFraction_); + } + if (!getAllocatorTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, allocatorType_); + } + if (deferredDeletionBytes_ != 0L) { + output.writeInt64(3, deferredDeletionBytes_); + } + if (allowGrowth_ != false) { + output.writeBool(4, allowGrowth_); + } + if (!getVisibleDeviceListBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, visibleDeviceList_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (perProcessGpuMemoryFraction_ != 0D) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(1, perProcessGpuMemoryFraction_); + } + if (!getAllocatorTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, allocatorType_); + } + if (deferredDeletionBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, deferredDeletionBytes_); + } + if (allowGrowth_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, allowGrowth_); + } + if 
(!getVisibleDeviceListBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, visibleDeviceList_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.GPUOptions)) { + return super.equals(obj); + } + org.tensorflow.framework.GPUOptions other = (org.tensorflow.framework.GPUOptions) obj; + + boolean result = true; + result = result && ( + java.lang.Double.doubleToLongBits(getPerProcessGpuMemoryFraction()) + == java.lang.Double.doubleToLongBits( + other.getPerProcessGpuMemoryFraction())); + result = result && getAllocatorType() + .equals(other.getAllocatorType()); + result = result && (getDeferredDeletionBytes() + == other.getDeferredDeletionBytes()); + result = result && (getAllowGrowth() + == other.getAllowGrowth()); + result = result && getVisibleDeviceList() + .equals(other.getVisibleDeviceList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + PER_PROCESS_GPU_MEMORY_FRACTION_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getPerProcessGpuMemoryFraction())); + hash = (37 * hash) + ALLOCATOR_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getAllocatorType().hashCode(); + hash = (37 * hash) + DEFERRED_DELETION_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getDeferredDeletionBytes()); + hash = (37 * hash) + ALLOW_GROWTH_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getAllowGrowth()); + hash = (37 * hash) + VISIBLE_DEVICE_LIST_FIELD_NUMBER; + hash = (53 * hash) + getVisibleDeviceList().hashCode(); + hash = (29 * hash) 
+ unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.GPUOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GPUOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GPUOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GPUOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GPUOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GPUOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GPUOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GPUOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GPUOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GPUOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.GPUOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.GPUOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.GPUOptions) + org.tensorflow.framework.GPUOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GPUOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GPUOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GPUOptions.class, 
org.tensorflow.framework.GPUOptions.Builder.class); + } + + // Construct using org.tensorflow.framework.GPUOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + perProcessGpuMemoryFraction_ = 0D; + + allocatorType_ = ""; + + deferredDeletionBytes_ = 0L; + + allowGrowth_ = false; + + visibleDeviceList_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GPUOptions_descriptor; + } + + public org.tensorflow.framework.GPUOptions getDefaultInstanceForType() { + return org.tensorflow.framework.GPUOptions.getDefaultInstance(); + } + + public org.tensorflow.framework.GPUOptions build() { + org.tensorflow.framework.GPUOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.GPUOptions buildPartial() { + org.tensorflow.framework.GPUOptions result = new org.tensorflow.framework.GPUOptions(this); + result.perProcessGpuMemoryFraction_ = perProcessGpuMemoryFraction_; + result.allocatorType_ = allocatorType_; + result.deferredDeletionBytes_ = deferredDeletionBytes_; + result.allowGrowth_ = allowGrowth_; + result.visibleDeviceList_ = visibleDeviceList_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) 
{ + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.GPUOptions) { + return mergeFrom((org.tensorflow.framework.GPUOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.GPUOptions other) { + if (other == org.tensorflow.framework.GPUOptions.getDefaultInstance()) return this; + if (other.getPerProcessGpuMemoryFraction() != 0D) { + setPerProcessGpuMemoryFraction(other.getPerProcessGpuMemoryFraction()); + } + if (!other.getAllocatorType().isEmpty()) { + allocatorType_ = other.allocatorType_; + onChanged(); + } + if (other.getDeferredDeletionBytes() != 0L) { + setDeferredDeletionBytes(other.getDeferredDeletionBytes()); + } + if (other.getAllowGrowth() != false) { + setAllowGrowth(other.getAllowGrowth()); + } + if (!other.getVisibleDeviceList().isEmpty()) { + visibleDeviceList_ = other.visibleDeviceList_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.GPUOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = 
(org.tensorflow.framework.GPUOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private double perProcessGpuMemoryFraction_ ; + /** + *
+     * A value between 0 and 1 that indicates what fraction of the
+     * available GPU memory to pre-allocate for each process.  1 means
+     * to pre-allocate all of the GPU memory, 0.5 means the process
+     * allocates ~50% of the available GPU memory.
+     * 
+ * + * optional double per_process_gpu_memory_fraction = 1; + */ + public double getPerProcessGpuMemoryFraction() { + return perProcessGpuMemoryFraction_; + } + /** + *
+     * A value between 0 and 1 that indicates what fraction of the
+     * available GPU memory to pre-allocate for each process.  1 means
+     * to pre-allocate all of the GPU memory, 0.5 means the process
+     * allocates ~50% of the available GPU memory.
+     * 
+ * + * optional double per_process_gpu_memory_fraction = 1; + */ + public Builder setPerProcessGpuMemoryFraction(double value) { + + perProcessGpuMemoryFraction_ = value; + onChanged(); + return this; + } + /** + *
+     * A value between 0 and 1 that indicates what fraction of the
+     * available GPU memory to pre-allocate for each process.  1 means
+     * to pre-allocate all of the GPU memory, 0.5 means the process
+     * allocates ~50% of the available GPU memory.
+     * 
+ * + * optional double per_process_gpu_memory_fraction = 1; + */ + public Builder clearPerProcessGpuMemoryFraction() { + + perProcessGpuMemoryFraction_ = 0D; + onChanged(); + return this; + } + + private java.lang.Object allocatorType_ = ""; + /** + *
+     * The type of GPU allocation strategy to use.
+     * Allowed values:
+     * "": The empty string (default) uses a system-chosen default
+     *     which may change over time.
+     * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+     *        version of dlmalloc.
+     * 
+ * + * optional string allocator_type = 2; + */ + public java.lang.String getAllocatorType() { + java.lang.Object ref = allocatorType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The type of GPU allocation strategy to use.
+     * Allowed values:
+     * "": The empty string (default) uses a system-chosen default
+     *     which may change over time.
+     * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+     *        version of dlmalloc.
+     * 
+ * + * optional string allocator_type = 2; + */ + public com.google.protobuf.ByteString + getAllocatorTypeBytes() { + java.lang.Object ref = allocatorType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The type of GPU allocation strategy to use.
+     * Allowed values:
+     * "": The empty string (default) uses a system-chosen default
+     *     which may change over time.
+     * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+     *        version of dlmalloc.
+     * 
+ * + * optional string allocator_type = 2; + */ + public Builder setAllocatorType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + allocatorType_ = value; + onChanged(); + return this; + } + /** + *
+     * The type of GPU allocation strategy to use.
+     * Allowed values:
+     * "": The empty string (default) uses a system-chosen default
+     *     which may change over time.
+     * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+     *        version of dlmalloc.
+     * 
+ * + * optional string allocator_type = 2; + */ + public Builder clearAllocatorType() { + + allocatorType_ = getDefaultInstance().getAllocatorType(); + onChanged(); + return this; + } + /** + *
+     * The type of GPU allocation strategy to use.
+     * Allowed values:
+     * "": The empty string (default) uses a system-chosen default
+     *     which may change over time.
+     * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+     *        version of dlmalloc.
+     * 
+ * + * optional string allocator_type = 2; + */ + public Builder setAllocatorTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + allocatorType_ = value; + onChanged(); + return this; + } + + private long deferredDeletionBytes_ ; + /** + *
+     * Delay deletion of up to this many bytes to reduce the number of
+     * interactions with gpu driver code.  If 0, the system chooses
+     * a reasonable default (several MBs).
+     * 
+ * + * optional int64 deferred_deletion_bytes = 3; + */ + public long getDeferredDeletionBytes() { + return deferredDeletionBytes_; + } + /** + *
+     * Delay deletion of up to this many bytes to reduce the number of
+     * interactions with gpu driver code.  If 0, the system chooses
+     * a reasonable default (several MBs).
+     * 
+ * + * optional int64 deferred_deletion_bytes = 3; + */ + public Builder setDeferredDeletionBytes(long value) { + + deferredDeletionBytes_ = value; + onChanged(); + return this; + } + /** + *
+     * Delay deletion of up to this many bytes to reduce the number of
+     * interactions with gpu driver code.  If 0, the system chooses
+     * a reasonable default (several MBs).
+     * 
+ * + * optional int64 deferred_deletion_bytes = 3; + */ + public Builder clearDeferredDeletionBytes() { + + deferredDeletionBytes_ = 0L; + onChanged(); + return this; + } + + private boolean allowGrowth_ ; + /** + *
+     * If true, the allocator does not pre-allocate the entire specified
+     * GPU memory region, instead starting small and growing as needed.
+     * 
+ * + * optional bool allow_growth = 4; + */ + public boolean getAllowGrowth() { + return allowGrowth_; + } + /** + *
+     * If true, the allocator does not pre-allocate the entire specified
+     * GPU memory region, instead starting small and growing as needed.
+     * 
+ * + * optional bool allow_growth = 4; + */ + public Builder setAllowGrowth(boolean value) { + + allowGrowth_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, the allocator does not pre-allocate the entire specified
+     * GPU memory region, instead starting small and growing as needed.
+     * 
+ * + * optional bool allow_growth = 4; + */ + public Builder clearAllowGrowth() { + + allowGrowth_ = false; + onChanged(); + return this; + } + + private java.lang.Object visibleDeviceList_ = ""; + /** + *
+     * A comma-separated list of GPU ids that determines the 'visible'
+     * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+     * can see 8 GPU devices in the process, and one wanted to map
+     * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+     * would specify this field as "5,3".  This field is similar in
+     * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+     * it applies to the visible GPU devices in the process.
+     * NOTE: The GPU driver provides the process with the visible GPUs
+     * in an order which is not guaranteed to have any correlation to
+     * the *physical* GPU id in the machine.  This field is used for
+     * remapping "visible" to "virtual", which means this operates only
+     * after the process starts.  Users are required to use vendor
+     * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+     * physical to visible device mapping prior to invoking TensorFlow.
+     * 
+ * + * optional string visible_device_list = 5; + */ + public java.lang.String getVisibleDeviceList() { + java.lang.Object ref = visibleDeviceList_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + visibleDeviceList_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * A comma-separated list of GPU ids that determines the 'visible'
+     * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+     * can see 8 GPU devices in the process, and one wanted to map
+     * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+     * would specify this field as "5,3".  This field is similar in
+     * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+     * it applies to the visible GPU devices in the process.
+     * NOTE: The GPU driver provides the process with the visible GPUs
+     * in an order which is not guaranteed to have any correlation to
+     * the *physical* GPU id in the machine.  This field is used for
+     * remapping "visible" to "virtual", which means this operates only
+     * after the process starts.  Users are required to use vendor
+     * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+     * physical to visible device mapping prior to invoking TensorFlow.
+     * 
+ * + * optional string visible_device_list = 5; + */ + public com.google.protobuf.ByteString + getVisibleDeviceListBytes() { + java.lang.Object ref = visibleDeviceList_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + visibleDeviceList_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * A comma-separated list of GPU ids that determines the 'visible'
+     * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+     * can see 8 GPU devices in the process, and one wanted to map
+     * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+     * would specify this field as "5,3".  This field is similar in
+     * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+     * it applies to the visible GPU devices in the process.
+     * NOTE: The GPU driver provides the process with the visible GPUs
+     * in an order which is not guaranteed to have any correlation to
+     * the *physical* GPU id in the machine.  This field is used for
+     * remapping "visible" to "virtual", which means this operates only
+     * after the process starts.  Users are required to use vendor
+     * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+     * physical to visible device mapping prior to invoking TensorFlow.
+     * 
+ * + * optional string visible_device_list = 5; + */ + public Builder setVisibleDeviceList( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + visibleDeviceList_ = value; + onChanged(); + return this; + } + /** + *
+     * A comma-separated list of GPU ids that determines the 'visible'
+     * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+     * can see 8 GPU devices in the process, and one wanted to map
+     * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+     * would specify this field as "5,3".  This field is similar in
+     * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+     * it applies to the visible GPU devices in the process.
+     * NOTE: The GPU driver provides the process with the visible GPUs
+     * in an order which is not guaranteed to have any correlation to
+     * the *physical* GPU id in the machine.  This field is used for
+     * remapping "visible" to "virtual", which means this operates only
+     * after the process starts.  Users are required to use vendor
+     * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+     * physical to visible device mapping prior to invoking TensorFlow.
+     * 
+ * + * optional string visible_device_list = 5; + */ + public Builder clearVisibleDeviceList() { + + visibleDeviceList_ = getDefaultInstance().getVisibleDeviceList(); + onChanged(); + return this; + } + /** + *
+     * A comma-separated list of GPU ids that determines the 'visible'
+     * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+     * can see 8 GPU devices in the process, and one wanted to map
+     * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+     * would specify this field as "5,3".  This field is similar in
+     * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+     * it applies to the visible GPU devices in the process.
+     * NOTE: The GPU driver provides the process with the visible GPUs
+     * in an order which is not guaranteed to have any correlation to
+     * the *physical* GPU id in the machine.  This field is used for
+     * remapping "visible" to "virtual", which means this operates only
+     * after the process starts.  Users are required to use vendor
+     * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+     * physical to visible device mapping prior to invoking TensorFlow.
+     * 
+ * + * optional string visible_device_list = 5; + */ + public Builder setVisibleDeviceListBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + visibleDeviceList_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.GPUOptions) + } + + // @@protoc_insertion_point(class_scope:tensorflow.GPUOptions) + private static final org.tensorflow.framework.GPUOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.GPUOptions(); + } + + public static org.tensorflow.framework.GPUOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GPUOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GPUOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.GPUOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptionsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptionsOrBuilder.java new file mode 100644 index 0000000000000..cdd2bb795f358 --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GPUOptionsOrBuilder.java @@ -0,0 +1,114 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface GPUOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.GPUOptions) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * A value between 0 and 1 that indicates what fraction of the
+   * available GPU memory to pre-allocate for each process.  1 means
+   * to pre-allocate all of the GPU memory, 0.5 means the process
+   * allocates ~50% of the available GPU memory.
+   * 
+ * + * optional double per_process_gpu_memory_fraction = 1; + */ + double getPerProcessGpuMemoryFraction(); + + /** + *
+   * The type of GPU allocation strategy to use.
+   * Allowed values:
+   * "": The empty string (default) uses a system-chosen default
+   *     which may change over time.
+   * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+   *        version of dlmalloc.
+   * 
+ * + * optional string allocator_type = 2; + */ + java.lang.String getAllocatorType(); + /** + *
+   * The type of GPU allocation strategy to use.
+   * Allowed values:
+   * "": The empty string (default) uses a system-chosen default
+   *     which may change over time.
+   * "BFC": A "Best-fit with coalescing" algorithm, simplified from a
+   *        version of dlmalloc.
+   * 
+ * + * optional string allocator_type = 2; + */ + com.google.protobuf.ByteString + getAllocatorTypeBytes(); + + /** + *
+   * Delay deletion of up to this many bytes to reduce the number of
+   * interactions with gpu driver code.  If 0, the system chooses
+   * a reasonable default (several MBs).
+   * 
+ * + * optional int64 deferred_deletion_bytes = 3; + */ + long getDeferredDeletionBytes(); + + /** + *
+   * If true, the allocator does not pre-allocate the entire specified
+   * GPU memory region, instead starting small and growing as needed.
+   * 
+ * + * optional bool allow_growth = 4; + */ + boolean getAllowGrowth(); + + /** + *
+   * A comma-separated list of GPU ids that determines the 'visible'
+   * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+   * can see 8 GPU devices in the process, and one wanted to map
+   * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+   * would specify this field as "5,3".  This field is similar in
+   * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+   * it applies to the visible GPU devices in the process.
+   * NOTE: The GPU driver provides the process with the visible GPUs
+   * in an order which is not guaranteed to have any correlation to
+   * the *physical* GPU id in the machine.  This field is used for
+   * remapping "visible" to "virtual", which means this operates only
+   * after the process starts.  Users are required to use vendor
+   * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+   * physical to visible device mapping prior to invoking TensorFlow.
+   * 
+ * + * optional string visible_device_list = 5; + */ + java.lang.String getVisibleDeviceList(); + /** + *
+   * A comma-separated list of GPU ids that determines the 'visible'
+   * to 'virtual' mapping of GPU devices.  For example, if TensorFlow
+   * can see 8 GPU devices in the process, and one wanted to map
+   * visible GPU devices 5 and 3 as "/gpu:0", and "/gpu:1", then one
+   * would specify this field as "5,3".  This field is similar in
+   * spirit to the CUDA_VISIBLE_DEVICES environment variable, except
+   * it applies to the visible GPU devices in the process.
+   * NOTE: The GPU driver provides the process with the visible GPUs
+   * in an order which is not guaranteed to have any correlation to
+   * the *physical* GPU id in the machine.  This field is used for
+   * remapping "visible" to "virtual", which means this operates only
+   * after the process starts.  Users are required to use vendor
+   * specific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the
+   * physical to visible device mapping prior to invoking TensorFlow.
+   * 
+ * + * optional string visible_device_list = 5; + */ + com.google.protobuf.ByteString + getVisibleDeviceListBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDef.java new file mode 100644 index 0000000000000..782b9a70a2448 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDef.java @@ -0,0 +1,713 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +/** + *
+ * GradientDef defines the gradient function of a function defined in
+ * a function library.
+ * A gradient function g (specified by gradient_func) for a function f
+ * (specified by function_name) must follow the following:
+ * The function 'f' must be a numerical function which takes N inputs
+ * and produces M outputs. Its gradient function 'g', which is a
+ * function taking N + M inputs and produces N outputs.
+ * I.e. if we have
+ *    (y1, y2, ..., y_M) = f(x1, x2, ..., x_N),
+ * then, g is
+ *    (dL/dx1, dL/dx2, ..., dL/dx_N) = g(x1, x2, ..., x_N,
+ *                                      dL/dy1, dL/dy2, ..., dL/dy_M),
+ * where L is a scalar-value function of (x1, x2, ..., xN) (e.g., the
+ * loss function). dL/dx_i is the partial derivative of L with respect
+ * to x_i.
+ * 
+ * + * Protobuf type {@code tensorflow.GradientDef} + */ +public final class GradientDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.GradientDef) + GradientDefOrBuilder { + // Use GradientDef.newBuilder() to construct. + private GradientDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GradientDef() { + functionName_ = ""; + gradientFunc_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private GradientDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + functionName_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + gradientFunc_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_GradientDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_GradientDef_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.tensorflow.framework.GradientDef.class, org.tensorflow.framework.GradientDef.Builder.class); + } + + public static final int FUNCTION_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object functionName_; + /** + *
+   * The function name.
+   * 
+ * + * optional string function_name = 1; + */ + public java.lang.String getFunctionName() { + java.lang.Object ref = functionName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + functionName_ = s; + return s; + } + } + /** + *
+   * The function name.
+   * 
+ * + * optional string function_name = 1; + */ + public com.google.protobuf.ByteString + getFunctionNameBytes() { + java.lang.Object ref = functionName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + functionName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int GRADIENT_FUNC_FIELD_NUMBER = 2; + private volatile java.lang.Object gradientFunc_; + /** + *
+   * The gradient function's name.
+   * 
+ * + * optional string gradient_func = 2; + */ + public java.lang.String getGradientFunc() { + java.lang.Object ref = gradientFunc_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + gradientFunc_ = s; + return s; + } + } + /** + *
+   * The gradient function's name.
+   * 
+ * + * optional string gradient_func = 2; + */ + public com.google.protobuf.ByteString + getGradientFuncBytes() { + java.lang.Object ref = gradientFunc_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + gradientFunc_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getFunctionNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, functionName_); + } + if (!getGradientFuncBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, gradientFunc_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getFunctionNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, functionName_); + } + if (!getGradientFuncBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, gradientFunc_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.GradientDef)) { + return super.equals(obj); + } + org.tensorflow.framework.GradientDef other = (org.tensorflow.framework.GradientDef) obj; + + boolean result = true; + result = result && getFunctionName() + .equals(other.getFunctionName()); + result = result && getGradientFunc() + .equals(other.getGradientFunc()); + return result; + } + 
+ @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + FUNCTION_NAME_FIELD_NUMBER; + hash = (53 * hash) + getFunctionName().hashCode(); + hash = (37 * hash) + GRADIENT_FUNC_FIELD_NUMBER; + hash = (53 * hash) + getGradientFunc().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.GradientDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GradientDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GradientDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GradientDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GradientDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GradientDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GradientDef 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GradientDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GradientDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GradientDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.GradientDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * GradientDef defines the gradient function of a function defined in
+   * a function library.
+   * A gradient function g (specified by gradient_func) for a function f
+   * (specified by function_name) must follow the following:
+   * The function 'f' must be a numerical function which takes N inputs
+   * and produces M outputs. Its gradient function 'g', which is a
+   * function taking N + M inputs and produces N outputs.
+   * I.e. if we have
+   *    (y1, y2, ..., y_M) = f(x1, x2, ..., x_N),
+   * then, g is
+   *    (dL/dx1, dL/dx2, ..., dL/dx_N) = g(x1, x2, ..., x_N,
+   *                                      dL/dy1, dL/dy2, ..., dL/dy_M),
+   * where L is a scalar-value function of (x1, x2, ..., xN) (e.g., the
+   * loss function). dL/dx_i is the partial derivative of L with respect
+   * to x_i.
+   * 
+ * + * Protobuf type {@code tensorflow.GradientDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.GradientDef) + org.tensorflow.framework.GradientDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_GradientDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_GradientDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GradientDef.class, org.tensorflow.framework.GradientDef.Builder.class); + } + + // Construct using org.tensorflow.framework.GradientDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + functionName_ = ""; + + gradientFunc_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.FunctionProtos.internal_static_tensorflow_GradientDef_descriptor; + } + + public org.tensorflow.framework.GradientDef getDefaultInstanceForType() { + return org.tensorflow.framework.GradientDef.getDefaultInstance(); + } + + public org.tensorflow.framework.GradientDef build() { + org.tensorflow.framework.GradientDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.GradientDef buildPartial() { + 
org.tensorflow.framework.GradientDef result = new org.tensorflow.framework.GradientDef(this); + result.functionName_ = functionName_; + result.gradientFunc_ = gradientFunc_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.GradientDef) { + return mergeFrom((org.tensorflow.framework.GradientDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.GradientDef other) { + if (other == org.tensorflow.framework.GradientDef.getDefaultInstance()) return this; + if (!other.getFunctionName().isEmpty()) { + functionName_ = other.functionName_; + onChanged(); + } + if (!other.getGradientFunc().isEmpty()) { + gradientFunc_ = other.gradientFunc_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.GradientDef parsedMessage 
= null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.GradientDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object functionName_ = ""; + /** + *
+     * The function name.
+     * 
+ * + * optional string function_name = 1; + */ + public java.lang.String getFunctionName() { + java.lang.Object ref = functionName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + functionName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The function name.
+     * 
+ * + * optional string function_name = 1; + */ + public com.google.protobuf.ByteString + getFunctionNameBytes() { + java.lang.Object ref = functionName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + functionName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The function name.
+     * 
+ * + * optional string function_name = 1; + */ + public Builder setFunctionName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + functionName_ = value; + onChanged(); + return this; + } + /** + *
+     * The function name.
+     * 
+ * + * optional string function_name = 1; + */ + public Builder clearFunctionName() { + + functionName_ = getDefaultInstance().getFunctionName(); + onChanged(); + return this; + } + /** + *
+     * The function name.
+     * 
+ * + * optional string function_name = 1; + */ + public Builder setFunctionNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + functionName_ = value; + onChanged(); + return this; + } + + private java.lang.Object gradientFunc_ = ""; + /** + *
+     * The gradient function's name.
+     * 
+ * + * optional string gradient_func = 2; + */ + public java.lang.String getGradientFunc() { + java.lang.Object ref = gradientFunc_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + gradientFunc_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The gradient function's name.
+     * 
+ * + * optional string gradient_func = 2; + */ + public com.google.protobuf.ByteString + getGradientFuncBytes() { + java.lang.Object ref = gradientFunc_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + gradientFunc_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The gradient function's name.
+     * 
+ * + * optional string gradient_func = 2; + */ + public Builder setGradientFunc( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + gradientFunc_ = value; + onChanged(); + return this; + } + /** + *
+     * The gradient function's name.
+     * 
+ * + * optional string gradient_func = 2; + */ + public Builder clearGradientFunc() { + + gradientFunc_ = getDefaultInstance().getGradientFunc(); + onChanged(); + return this; + } + /** + *
+     * The gradient function's name.
+     * 
+ * + * optional string gradient_func = 2; + */ + public Builder setGradientFuncBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + gradientFunc_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.GradientDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.GradientDef) + private static final org.tensorflow.framework.GradientDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.GradientDef(); + } + + public static org.tensorflow.framework.GradientDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GradientDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GradientDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.GradientDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDefOrBuilder.java new file mode 100644 index 0000000000000..a671ace439c9c --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GradientDefOrBuilder.java @@ -0,0 +1,45 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/function.proto + +package org.tensorflow.framework; + +public interface GradientDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.GradientDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * The function name.
+   * 
+ * + * optional string function_name = 1; + */ + java.lang.String getFunctionName(); + /** + *
+   * The function name.
+   * 
+ * + * optional string function_name = 1; + */ + com.google.protobuf.ByteString + getFunctionNameBytes(); + + /** + *
+   * The gradient function's name.
+   * 
+ * + * optional string gradient_func = 2; + */ + java.lang.String getGradientFunc(); + /** + *
+   * The gradient function's name.
+   * 
+ * + * optional string gradient_func = 2; + */ + com.google.protobuf.ByteString + getGradientFuncBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDef.java new file mode 100644 index 0000000000000..8b5f4c8d74146 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDef.java @@ -0,0 +1,1539 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/graph.proto + +package org.tensorflow.framework; + +/** + *
+ * Represents the graph of operations
+ * 
+ * + * Protobuf type {@code tensorflow.GraphDef} + */ +public final class GraphDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.GraphDef) + GraphDefOrBuilder { + // Use GraphDef.newBuilder() to construct. + private GraphDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GraphDef() { + node_ = java.util.Collections.emptyList(); + version_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private GraphDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + node_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + node_.add( + input.readMessage(org.tensorflow.framework.NodeDef.parser(), extensionRegistry)); + break; + } + case 18: { + org.tensorflow.framework.FunctionDefLibrary.Builder subBuilder = null; + if (library_ != null) { + subBuilder = library_.toBuilder(); + } + library_ = input.readMessage(org.tensorflow.framework.FunctionDefLibrary.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(library_); + library_ = subBuilder.buildPartial(); + } + + break; + } + case 24: { + + version_ = input.readInt32(); + break; + } + case 34: { + org.tensorflow.framework.VersionDef.Builder subBuilder = null; + if (versions_ != null) { + subBuilder = versions_.toBuilder(); + } + versions_ = input.readMessage(org.tensorflow.framework.VersionDef.parser(), 
extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(versions_); + versions_ = subBuilder.buildPartial(); + } + + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + node_ = java.util.Collections.unmodifiableList(node_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.GraphProtos.internal_static_tensorflow_GraphDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.GraphProtos.internal_static_tensorflow_GraphDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GraphDef.class, org.tensorflow.framework.GraphDef.Builder.class); + } + + private int bitField0_; + public static final int NODE_FIELD_NUMBER = 1; + private java.util.List node_; + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public java.util.List getNodeList() { + return node_; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public java.util.List + getNodeOrBuilderList() { + return node_; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public int getNodeCount() { + return node_.size(); + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDef getNode(int index) { + return node_.get(index); + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDefOrBuilder getNodeOrBuilder( + int index) { + return node_.get(index); + } + + public static final int VERSIONS_FIELD_NUMBER = 4; + private org.tensorflow.framework.VersionDef versions_; + /** + *
+   * Compatibility versions of the graph.  See core/public/version.h for version
+   * history.  The GraphDef version is distinct from the TensorFlow version, and
+   * each release of TensorFlow will support a range of GraphDef versions.
+   * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public boolean hasVersions() { + return versions_ != null; + } + /** + *
+   * Compatibility versions of the graph.  See core/public/version.h for version
+   * history.  The GraphDef version is distinct from the TensorFlow version, and
+   * each release of TensorFlow will support a range of GraphDef versions.
+   * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public org.tensorflow.framework.VersionDef getVersions() { + return versions_ == null ? org.tensorflow.framework.VersionDef.getDefaultInstance() : versions_; + } + /** + *
+   * Compatibility versions of the graph.  See core/public/version.h for version
+   * history.  The GraphDef version is distinct from the TensorFlow version, and
+   * each release of TensorFlow will support a range of GraphDef versions.
+   * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public org.tensorflow.framework.VersionDefOrBuilder getVersionsOrBuilder() { + return getVersions(); + } + + public static final int VERSION_FIELD_NUMBER = 3; + private int version_; + /** + *
+   * Deprecated single version field; use versions above instead.  Since all
+   * GraphDef changes before "versions" was introduced were forward
+   * compatible, this field is entirely ignored.
+   * 
+ * + * optional int32 version = 3 [deprecated = true]; + */ + @java.lang.Deprecated public int getVersion() { + return version_; + } + + public static final int LIBRARY_FIELD_NUMBER = 2; + private org.tensorflow.framework.FunctionDefLibrary library_; + /** + *
+   * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+   * "library" provides user-defined functions.
+   * Naming:
+   *   * library.function.name are in a flat namespace.
+   *     NOTE: We may need to change it to be hierarchical to support
+   *     different orgs. E.g.,
+   *     { "/google/nn", { ... }},
+   *     { "/google/vision", { ... }}
+   *     { "/org_foo/module_bar", { ... }}
+   *     map<string, FunctionDefLib> named_lib;
+   *   * If node[i].op is the name of one function in "library",
+   *     node[i] is deemed as a function call. Otherwise, node[i].op
+   *     must be a primitive operation supported by the runtime.
+   * Function call semantics:
+   *   * The callee may start execution as soon as some of its inputs
+   *     are ready. The caller may want to use Tuple() mechanism to
+   *     ensure all inputs are ready in the same time.
+   *   * The consumer of return values may start executing as soon as
+   *     the return values the consumer depends on are ready.  The
+   *     consumer may want to use Tuple() mechanism to ensure the
+   *     consumer does not start until all return values of the callee
+   *     function are ready.
+   * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public boolean hasLibrary() { + return library_ != null; + } + /** + *
+   * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+   * "library" provides user-defined functions.
+   * Naming:
+   *   * library.function.name are in a flat namespace.
+   *     NOTE: We may need to change it to be hierarchical to support
+   *     different orgs. E.g.,
+   *     { "/google/nn", { ... }},
+   *     { "/google/vision", { ... }}
+   *     { "/org_foo/module_bar", { ... }}
+   *     map<string, FunctionDefLib> named_lib;
+   *   * If node[i].op is the name of one function in "library",
+   *     node[i] is deemed as a function call. Otherwise, node[i].op
+   *     must be a primitive operation supported by the runtime.
+   * Function call semantics:
+   *   * The callee may start execution as soon as some of its inputs
+   *     are ready. The caller may want to use Tuple() mechanism to
+   *     ensure all inputs are ready in the same time.
+   *   * The consumer of return values may start executing as soon as
+   *     the return values the consumer depends on are ready.  The
+   *     consumer may want to use Tuple() mechanism to ensure the
+   *     consumer does not start until all return values of the callee
+   *     function are ready.
+   * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public org.tensorflow.framework.FunctionDefLibrary getLibrary() { + return library_ == null ? org.tensorflow.framework.FunctionDefLibrary.getDefaultInstance() : library_; + } + /** + *
+   * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+   * "library" provides user-defined functions.
+   * Naming:
+   *   * library.function.name are in a flat namespace.
+   *     NOTE: We may need to change it to be hierarchical to support
+   *     different orgs. E.g.,
+   *     { "/google/nn", { ... }},
+   *     { "/google/vision", { ... }}
+   *     { "/org_foo/module_bar", { ... }}
+   *     map<string, FunctionDefLib> named_lib;
+   *   * If node[i].op is the name of one function in "library",
+   *     node[i] is deemed as a function call. Otherwise, node[i].op
+   *     must be a primitive operation supported by the runtime.
+   * Function call semantics:
+   *   * The callee may start execution as soon as some of its inputs
+   *     are ready. The caller may want to use Tuple() mechanism to
+   *     ensure all inputs are ready in the same time.
+   *   * The consumer of return values may start executing as soon as
+   *     the return values the consumer depends on are ready.  The
+   *     consumer may want to use Tuple() mechanism to ensure the
+   *     consumer does not start until all return values of the callee
+   *     function are ready.
+   * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public org.tensorflow.framework.FunctionDefLibraryOrBuilder getLibraryOrBuilder() { + return getLibrary(); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < node_.size(); i++) { + output.writeMessage(1, node_.get(i)); + } + if (library_ != null) { + output.writeMessage(2, getLibrary()); + } + if (version_ != 0) { + output.writeInt32(3, version_); + } + if (versions_ != null) { + output.writeMessage(4, getVersions()); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < node_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, node_.get(i)); + } + if (library_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getLibrary()); + } + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, version_); + } + if (versions_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getVersions()); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.GraphDef)) { + return super.equals(obj); + } + org.tensorflow.framework.GraphDef other = (org.tensorflow.framework.GraphDef) obj; + + boolean result = true; + result = result && getNodeList() + .equals(other.getNodeList()); + result = result && (hasVersions() == other.hasVersions()); + if (hasVersions()) { + 
result = result && getVersions() + .equals(other.getVersions()); + } + result = result && (getVersion() + == other.getVersion()); + result = result && (hasLibrary() == other.hasLibrary()); + if (hasLibrary()) { + result = result && getLibrary() + .equals(other.getLibrary()); + } + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getNodeCount() > 0) { + hash = (37 * hash) + NODE_FIELD_NUMBER; + hash = (53 * hash) + getNodeList().hashCode(); + } + if (hasVersions()) { + hash = (37 * hash) + VERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getVersions().hashCode(); + } + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + if (hasLibrary()) { + hash = (37 * hash) + LIBRARY_FIELD_NUMBER; + hash = (53 * hash) + getLibrary().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.GraphDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GraphDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GraphDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GraphDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GraphDef 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GraphDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GraphDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GraphDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GraphDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GraphDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.GraphDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Represents the graph of operations
+   * 
+ * + * Protobuf type {@code tensorflow.GraphDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.GraphDef) + org.tensorflow.framework.GraphDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.GraphProtos.internal_static_tensorflow_GraphDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.GraphProtos.internal_static_tensorflow_GraphDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GraphDef.class, org.tensorflow.framework.GraphDef.Builder.class); + } + + // Construct using org.tensorflow.framework.GraphDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getNodeFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (nodeBuilder_ == null) { + node_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + nodeBuilder_.clear(); + } + if (versionsBuilder_ == null) { + versions_ = null; + } else { + versions_ = null; + versionsBuilder_ = null; + } + version_ = 0; + + if (libraryBuilder_ == null) { + library_ = null; + } else { + library_ = null; + libraryBuilder_ = null; + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.GraphProtos.internal_static_tensorflow_GraphDef_descriptor; + } + + public org.tensorflow.framework.GraphDef getDefaultInstanceForType() { + return 
org.tensorflow.framework.GraphDef.getDefaultInstance(); + } + + public org.tensorflow.framework.GraphDef build() { + org.tensorflow.framework.GraphDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.GraphDef buildPartial() { + org.tensorflow.framework.GraphDef result = new org.tensorflow.framework.GraphDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (nodeBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + node_ = java.util.Collections.unmodifiableList(node_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.node_ = node_; + } else { + result.node_ = nodeBuilder_.build(); + } + if (versionsBuilder_ == null) { + result.versions_ = versions_; + } else { + result.versions_ = versionsBuilder_.build(); + } + result.version_ = version_; + if (libraryBuilder_ == null) { + result.library_ = library_; + } else { + result.library_ = libraryBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.GraphDef) { + return mergeFrom((org.tensorflow.framework.GraphDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.GraphDef other) { + if (other == org.tensorflow.framework.GraphDef.getDefaultInstance()) return this; + if (nodeBuilder_ == null) { + if (!other.node_.isEmpty()) { + if (node_.isEmpty()) { + node_ = other.node_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureNodeIsMutable(); + node_.addAll(other.node_); + } + onChanged(); + } + } else { + if (!other.node_.isEmpty()) { + if (nodeBuilder_.isEmpty()) { + nodeBuilder_.dispose(); + nodeBuilder_ = null; + node_ = other.node_; + bitField0_ = (bitField0_ & ~0x00000001); + nodeBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getNodeFieldBuilder() : null; + } else { + nodeBuilder_.addAllMessages(other.node_); + } + } + } + if (other.hasVersions()) { + mergeVersions(other.getVersions()); + } + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + if (other.hasLibrary()) { + mergeLibrary(other.getLibrary()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.GraphDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.GraphDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List node_ = + java.util.Collections.emptyList(); + private void 
ensureNodeIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + node_ = new java.util.ArrayList(node_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeDef, org.tensorflow.framework.NodeDef.Builder, org.tensorflow.framework.NodeDefOrBuilder> nodeBuilder_; + + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public java.util.List getNodeList() { + if (nodeBuilder_ == null) { + return java.util.Collections.unmodifiableList(node_); + } else { + return nodeBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public int getNodeCount() { + if (nodeBuilder_ == null) { + return node_.size(); + } else { + return nodeBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDef getNode(int index) { + if (nodeBuilder_ == null) { + return node_.get(index); + } else { + return nodeBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder setNode( + int index, org.tensorflow.framework.NodeDef value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.set(index, value); + onChanged(); + } else { + nodeBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder setNode( + int index, org.tensorflow.framework.NodeDef.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.set(index, builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder addNode(org.tensorflow.framework.NodeDef value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.add(value); 
+ onChanged(); + } else { + nodeBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder addNode( + int index, org.tensorflow.framework.NodeDef value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureNodeIsMutable(); + node_.add(index, value); + onChanged(); + } else { + nodeBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder addNode( + org.tensorflow.framework.NodeDef.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.add(builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder addNode( + int index, org.tensorflow.framework.NodeDef.Builder builderForValue) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.add(index, builderForValue.build()); + onChanged(); + } else { + nodeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder addAllNode( + java.lang.Iterable values) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, node_); + onChanged(); + } else { + nodeBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder clearNode() { + if (nodeBuilder_ == null) { + node_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + nodeBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public Builder removeNode(int index) { + if (nodeBuilder_ == null) { + ensureNodeIsMutable(); + node_.remove(index); + onChanged(); + } else { + nodeBuilder_.remove(index); + } + 
return this; + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDef.Builder getNodeBuilder( + int index) { + return getNodeFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDefOrBuilder getNodeOrBuilder( + int index) { + if (nodeBuilder_ == null) { + return node_.get(index); } else { + return nodeBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public java.util.List + getNodeOrBuilderList() { + if (nodeBuilder_ != null) { + return nodeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(node_); + } + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDef.Builder addNodeBuilder() { + return getNodeFieldBuilder().addBuilder( + org.tensorflow.framework.NodeDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public org.tensorflow.framework.NodeDef.Builder addNodeBuilder( + int index) { + return getNodeFieldBuilder().addBuilder( + index, org.tensorflow.framework.NodeDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.NodeDef node = 1; + */ + public java.util.List + getNodeBuilderList() { + return getNodeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeDef, org.tensorflow.framework.NodeDef.Builder, org.tensorflow.framework.NodeDefOrBuilder> + getNodeFieldBuilder() { + if (nodeBuilder_ == null) { + nodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeDef, org.tensorflow.framework.NodeDef.Builder, org.tensorflow.framework.NodeDefOrBuilder>( + node_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + node_ = null; + } + return nodeBuilder_; + } + + private org.tensorflow.framework.VersionDef versions_ = null; + private 
com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.VersionDef, org.tensorflow.framework.VersionDef.Builder, org.tensorflow.framework.VersionDefOrBuilder> versionsBuilder_; + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public boolean hasVersions() { + return versionsBuilder_ != null || versions_ != null; + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public org.tensorflow.framework.VersionDef getVersions() { + if (versionsBuilder_ == null) { + return versions_ == null ? org.tensorflow.framework.VersionDef.getDefaultInstance() : versions_; + } else { + return versionsBuilder_.getMessage(); + } + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public Builder setVersions(org.tensorflow.framework.VersionDef value) { + if (versionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + versions_ = value; + onChanged(); + } else { + versionsBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public Builder setVersions( + org.tensorflow.framework.VersionDef.Builder builderForValue) { + if (versionsBuilder_ == null) { + versions_ = builderForValue.build(); + onChanged(); + } else { + versionsBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public Builder mergeVersions(org.tensorflow.framework.VersionDef value) { + if (versionsBuilder_ == null) { + if (versions_ != null) { + versions_ = + org.tensorflow.framework.VersionDef.newBuilder(versions_).mergeFrom(value).buildPartial(); + } else { + versions_ = value; + } + onChanged(); + } else { + versionsBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public Builder clearVersions() { + if (versionsBuilder_ == null) { + versions_ = null; + onChanged(); + } else { + versions_ = null; + versionsBuilder_ = null; + } + + return this; + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public org.tensorflow.framework.VersionDef.Builder getVersionsBuilder() { + + onChanged(); + return getVersionsFieldBuilder().getBuilder(); + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + public org.tensorflow.framework.VersionDefOrBuilder getVersionsOrBuilder() { + if (versionsBuilder_ != null) { + return versionsBuilder_.getMessageOrBuilder(); + } else { + return versions_ == null ? + org.tensorflow.framework.VersionDef.getDefaultInstance() : versions_; + } + } + /** + *
+     * Compatibility versions of the graph.  See core/public/version.h for version
+     * history.  The GraphDef version is distinct from the TensorFlow version, and
+     * each release of TensorFlow will support a range of GraphDef versions.
+     * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.VersionDef, org.tensorflow.framework.VersionDef.Builder, org.tensorflow.framework.VersionDefOrBuilder> + getVersionsFieldBuilder() { + if (versionsBuilder_ == null) { + versionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.VersionDef, org.tensorflow.framework.VersionDef.Builder, org.tensorflow.framework.VersionDefOrBuilder>( + getVersions(), + getParentForChildren(), + isClean()); + versions_ = null; + } + return versionsBuilder_; + } + + private int version_ ; + /** + *
+     * Deprecated single version field; use versions above instead.  Since all
+     * GraphDef changes before "versions" was introduced were forward
+     * compatible, this field is entirely ignored.
+     * 
+ * + * optional int32 version = 3 [deprecated = true]; + */ + @java.lang.Deprecated public int getVersion() { + return version_; + } + /** + *
+     * Deprecated single version field; use versions above instead.  Since all
+     * GraphDef changes before "versions" was introduced were forward
+     * compatible, this field is entirely ignored.
+     * 
+ * + * optional int32 version = 3 [deprecated = true]; + */ + @java.lang.Deprecated public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * Deprecated single version field; use versions above instead.  Since all
+     * GraphDef changes before "versions" was introduced were forward
+     * compatible, this field is entirely ignored.
+     * 
+ * + * optional int32 version = 3 [deprecated = true]; + */ + @java.lang.Deprecated public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + + private org.tensorflow.framework.FunctionDefLibrary library_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.FunctionDefLibrary, org.tensorflow.framework.FunctionDefLibrary.Builder, org.tensorflow.framework.FunctionDefLibraryOrBuilder> libraryBuilder_; + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public boolean hasLibrary() { + return libraryBuilder_ != null || library_ != null; + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public org.tensorflow.framework.FunctionDefLibrary getLibrary() { + if (libraryBuilder_ == null) { + return library_ == null ? org.tensorflow.framework.FunctionDefLibrary.getDefaultInstance() : library_; + } else { + return libraryBuilder_.getMessage(); + } + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public Builder setLibrary(org.tensorflow.framework.FunctionDefLibrary value) { + if (libraryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + library_ = value; + onChanged(); + } else { + libraryBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public Builder setLibrary( + org.tensorflow.framework.FunctionDefLibrary.Builder builderForValue) { + if (libraryBuilder_ == null) { + library_ = builderForValue.build(); + onChanged(); + } else { + libraryBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public Builder mergeLibrary(org.tensorflow.framework.FunctionDefLibrary value) { + if (libraryBuilder_ == null) { + if (library_ != null) { + library_ = + org.tensorflow.framework.FunctionDefLibrary.newBuilder(library_).mergeFrom(value).buildPartial(); + } else { + library_ = value; + } + onChanged(); + } else { + libraryBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public Builder clearLibrary() { + if (libraryBuilder_ == null) { + library_ = null; + onChanged(); + } else { + library_ = null; + libraryBuilder_ = null; + } + + return this; + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public org.tensorflow.framework.FunctionDefLibrary.Builder getLibraryBuilder() { + + onChanged(); + return getLibraryFieldBuilder().getBuilder(); + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + public org.tensorflow.framework.FunctionDefLibraryOrBuilder getLibraryOrBuilder() { + if (libraryBuilder_ != null) { + return libraryBuilder_.getMessageOrBuilder(); + } else { + return library_ == null ? + org.tensorflow.framework.FunctionDefLibrary.getDefaultInstance() : library_; + } + } + /** + *
+     * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+     * "library" provides user-defined functions.
+     * Naming:
+     *   * library.function.name are in a flat namespace.
+     *     NOTE: We may need to change it to be hierarchical to support
+     *     different orgs. E.g.,
+     *     { "/google/nn", { ... }},
+     *     { "/google/vision", { ... }}
+     *     { "/org_foo/module_bar", { ... }}
+     *     map<string, FunctionDefLib> named_lib;
+     *   * If node[i].op is the name of one function in "library",
+     *     node[i] is deemed as a function call. Otherwise, node[i].op
+     *     must be a primitive operation supported by the runtime.
+     * Function call semantics:
+     *   * The callee may start execution as soon as some of its inputs
+     *     are ready. The caller may want to use Tuple() mechanism to
+     *     ensure all inputs are ready in the same time.
+     *   * The consumer of return values may start executing as soon as
+     *     the return values the consumer depends on are ready.  The
+     *     consumer may want to use Tuple() mechanism to ensure the
+     *     consumer does not start until all return values of the callee
+     *     function are ready.
+     * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.FunctionDefLibrary, org.tensorflow.framework.FunctionDefLibrary.Builder, org.tensorflow.framework.FunctionDefLibraryOrBuilder> + getLibraryFieldBuilder() { + if (libraryBuilder_ == null) { + libraryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.FunctionDefLibrary, org.tensorflow.framework.FunctionDefLibrary.Builder, org.tensorflow.framework.FunctionDefLibraryOrBuilder>( + getLibrary(), + getParentForChildren(), + isClean()); + library_ = null; + } + return libraryBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.GraphDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.GraphDef) + private static final org.tensorflow.framework.GraphDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.GraphDef(); + } + + public static org.tensorflow.framework.GraphDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GraphDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GraphDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.GraphDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git 
a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDefOrBuilder.java new file mode 100644 index 0000000000000..53ab5638e29dd --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphDefOrBuilder.java @@ -0,0 +1,163 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/graph.proto + +package org.tensorflow.framework; + +public interface GraphDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.GraphDef) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .tensorflow.NodeDef node = 1; + */ + java.util.List + getNodeList(); + /** + * repeated .tensorflow.NodeDef node = 1; + */ + org.tensorflow.framework.NodeDef getNode(int index); + /** + * repeated .tensorflow.NodeDef node = 1; + */ + int getNodeCount(); + /** + * repeated .tensorflow.NodeDef node = 1; + */ + java.util.List + getNodeOrBuilderList(); + /** + * repeated .tensorflow.NodeDef node = 1; + */ + org.tensorflow.framework.NodeDefOrBuilder getNodeOrBuilder( + int index); + + /** + *
+   * Compatibility versions of the graph.  See core/public/version.h for version
+   * history.  The GraphDef version is distinct from the TensorFlow version, and
+   * each release of TensorFlow will support a range of GraphDef versions.
+   * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + boolean hasVersions(); + /** + *
+   * Compatibility versions of the graph.  See core/public/version.h for version
+   * history.  The GraphDef version is distinct from the TensorFlow version, and
+   * each release of TensorFlow will support a range of GraphDef versions.
+   * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + org.tensorflow.framework.VersionDef getVersions(); + /** + *
+   * Compatibility versions of the graph.  See core/public/version.h for version
+   * history.  The GraphDef version is distinct from the TensorFlow version, and
+   * each release of TensorFlow will support a range of GraphDef versions.
+   * 
+ * + * optional .tensorflow.VersionDef versions = 4; + */ + org.tensorflow.framework.VersionDefOrBuilder getVersionsOrBuilder(); + + /** + *
+   * Deprecated single version field; use versions above instead.  Since all
+   * GraphDef changes before "versions" was introduced were forward
+   * compatible, this field is entirely ignored.
+   * 
+ * + * optional int32 version = 3 [deprecated = true]; + */ + @java.lang.Deprecated int getVersion(); + + /** + *
+   * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+   * "library" provides user-defined functions.
+   * Naming:
+   *   * library.function.name are in a flat namespace.
+   *     NOTE: We may need to change it to be hierarchical to support
+   *     different orgs. E.g.,
+   *     { "/google/nn", { ... }},
+   *     { "/google/vision", { ... }}
+   *     { "/org_foo/module_bar", { ... }}
+   *     map<string, FunctionDefLib> named_lib;
+   *   * If node[i].op is the name of one function in "library",
+   *     node[i] is deemed as a function call. Otherwise, node[i].op
+   *     must be a primitive operation supported by the runtime.
+   * Function call semantics:
+   *   * The callee may start execution as soon as some of its inputs
+   *     are ready. The caller may want to use Tuple() mechanism to
+   *     ensure all inputs are ready in the same time.
+   *   * The consumer of return values may start executing as soon as
+   *     the return values the consumer depends on are ready.  The
+   *     consumer may want to use Tuple() mechanism to ensure the
+   *     consumer does not start until all return values of the callee
+   *     function are ready.
+   * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + boolean hasLibrary(); + /** + *
+   * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+   * "library" provides user-defined functions.
+   * Naming:
+   *   * library.function.name are in a flat namespace.
+   *     NOTE: We may need to change it to be hierarchical to support
+   *     different orgs. E.g.,
+   *     { "/google/nn", { ... }},
+   *     { "/google/vision", { ... }}
+   *     { "/org_foo/module_bar", { ... }}
+   *     map<string, FunctionDefLib> named_lib;
+   *   * If node[i].op is the name of one function in "library",
+   *     node[i] is deemed as a function call. Otherwise, node[i].op
+   *     must be a primitive operation supported by the runtime.
+   * Function call semantics:
+   *   * The callee may start execution as soon as some of its inputs
+   *     are ready. The caller may want to use Tuple() mechanism to
+   *     ensure all inputs are ready in the same time.
+   *   * The consumer of return values may start executing as soon as
+   *     the return values the consumer depends on are ready.  The
+   *     consumer may want to use Tuple() mechanism to ensure the
+   *     consumer does not start until all return values of the callee
+   *     function are ready.
+   * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + org.tensorflow.framework.FunctionDefLibrary getLibrary(); + /** + *
+   * EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
+   * "library" provides user-defined functions.
+   * Naming:
+   *   * library.function.name are in a flat namespace.
+   *     NOTE: We may need to change it to be hierarchical to support
+   *     different orgs. E.g.,
+   *     { "/google/nn", { ... }},
+   *     { "/google/vision", { ... }}
+   *     { "/org_foo/module_bar", { ... }}
+   *     map<string, FunctionDefLib> named_lib;
+   *   * If node[i].op is the name of one function in "library",
+   *     node[i] is deemed as a function call. Otherwise, node[i].op
+   *     must be a primitive operation supported by the runtime.
+   * Function call semantics:
+   *   * The callee may start execution as soon as some of its inputs
+   *     are ready. The caller may want to use Tuple() mechanism to
+   *     ensure all inputs are ready in the same time.
+   *   * The consumer of return values may start executing as soon as
+   *     the return values the consumer depends on are ready.  The
+   *     consumer may want to use Tuple() mechanism to ensure the
+   *     consumer does not start until all return values of the callee
+   *     function are ready.
+   * 
+ * + * optional .tensorflow.FunctionDefLibrary library = 2; + */ + org.tensorflow.framework.FunctionDefLibraryOrBuilder getLibraryOrBuilder(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptions.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptions.java new file mode 100644 index 0000000000000..ca024b649383a --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptions.java @@ -0,0 +1,1164 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.GraphOptions} + */ +public final class GraphOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.GraphOptions) + GraphOptionsOrBuilder { + // Use GraphOptions.newBuilder() to construct. 
+ private GraphOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GraphOptions() { + enableRecvScheduling_ = false; + buildCostModel_ = 0L; + buildCostModelAfter_ = 0L; + inferShapes_ = false; + placePrunedGraph_ = false; + enableBfloat16Sendrecv_ = false; + timelineStep_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private GraphOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 16: { + + enableRecvScheduling_ = input.readBool(); + break; + } + case 26: { + org.tensorflow.framework.OptimizerOptions.Builder subBuilder = null; + if (optimizerOptions_ != null) { + subBuilder = optimizerOptions_.toBuilder(); + } + optimizerOptions_ = input.readMessage(org.tensorflow.framework.OptimizerOptions.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(optimizerOptions_); + optimizerOptions_ = subBuilder.buildPartial(); + } + + break; + } + case 32: { + + buildCostModel_ = input.readInt64(); + break; + } + case 40: { + + inferShapes_ = input.readBool(); + break; + } + case 48: { + + placePrunedGraph_ = input.readBool(); + break; + } + case 56: { + + enableBfloat16Sendrecv_ = input.readBool(); + break; + } + case 64: { + + timelineStep_ = input.readInt32(); + break; + } + case 72: { + + buildCostModelAfter_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw 
new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GraphOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GraphOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GraphOptions.class, org.tensorflow.framework.GraphOptions.Builder.class); + } + + public static final int ENABLE_RECV_SCHEDULING_FIELD_NUMBER = 2; + private boolean enableRecvScheduling_; + /** + *
+   * If true, use control flow to schedule the activation of Recv nodes.
+   * (Currently ignored.)
+   * 
+ * + * optional bool enable_recv_scheduling = 2; + */ + public boolean getEnableRecvScheduling() { + return enableRecvScheduling_; + } + + public static final int OPTIMIZER_OPTIONS_FIELD_NUMBER = 3; + private org.tensorflow.framework.OptimizerOptions optimizerOptions_; + /** + *
+   * Options controlling how graph is optimized.
+   * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public boolean hasOptimizerOptions() { + return optimizerOptions_ != null; + } + /** + *
+   * Options controlling how graph is optimized.
+   * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public org.tensorflow.framework.OptimizerOptions getOptimizerOptions() { + return optimizerOptions_ == null ? org.tensorflow.framework.OptimizerOptions.getDefaultInstance() : optimizerOptions_; + } + /** + *
+   * Options controlling how graph is optimized.
+   * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public org.tensorflow.framework.OptimizerOptionsOrBuilder getOptimizerOptionsOrBuilder() { + return getOptimizerOptions(); + } + + public static final int BUILD_COST_MODEL_FIELD_NUMBER = 4; + private long buildCostModel_; + /** + *
+   * The number of steps to run before returning a cost model detailing
+   * the memory usage and performance of each node of the graph. 0 means
+   * no cost model.
+   * 
+ * + * optional int64 build_cost_model = 4; + */ + public long getBuildCostModel() { + return buildCostModel_; + } + + public static final int BUILD_COST_MODEL_AFTER_FIELD_NUMBER = 9; + private long buildCostModelAfter_; + /** + *
+   * The number of steps to skip before collecting statistics for the
+   * cost model.
+   * 
+ * + * optional int64 build_cost_model_after = 9; + */ + public long getBuildCostModelAfter() { + return buildCostModelAfter_; + } + + public static final int INFER_SHAPES_FIELD_NUMBER = 5; + private boolean inferShapes_; + /** + *
+   * Annotate each Node with Op output shape data, to the extent it can
+   * be statically inferred.
+   * 
+ * + * optional bool infer_shapes = 5; + */ + public boolean getInferShapes() { + return inferShapes_; + } + + public static final int PLACE_PRUNED_GRAPH_FIELD_NUMBER = 6; + private boolean placePrunedGraph_; + /** + *
+   * Only place the subgraphs that are run, rather than the entire graph.
+   * This is useful for interactive graph building, where one might
+   * produce graphs that cannot be placed during the debugging
+   * process.  In particular, it allows the client to continue work in
+   * a session after adding a node to a graph whose placement
+   * constraints are unsatisfiable.
+   * 
+ * + * optional bool place_pruned_graph = 6; + */ + public boolean getPlacePrunedGraph() { + return placePrunedGraph_; + } + + public static final int ENABLE_BFLOAT16_SENDRECV_FIELD_NUMBER = 7; + private boolean enableBfloat16Sendrecv_; + /** + *
+   * If true, transfer float values between processes as bfloat16.
+   * 
+ * + * optional bool enable_bfloat16_sendrecv = 7; + */ + public boolean getEnableBfloat16Sendrecv() { + return enableBfloat16Sendrecv_; + } + + public static final int TIMELINE_STEP_FIELD_NUMBER = 8; + private int timelineStep_; + /** + *
+   * If > 0, record a timeline every this many steps.
+   * EXPERIMENTAL: This currently has no effect in MasterSession.
+   * 
+ * + * optional int32 timeline_step = 8; + */ + public int getTimelineStep() { + return timelineStep_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (enableRecvScheduling_ != false) { + output.writeBool(2, enableRecvScheduling_); + } + if (optimizerOptions_ != null) { + output.writeMessage(3, getOptimizerOptions()); + } + if (buildCostModel_ != 0L) { + output.writeInt64(4, buildCostModel_); + } + if (inferShapes_ != false) { + output.writeBool(5, inferShapes_); + } + if (placePrunedGraph_ != false) { + output.writeBool(6, placePrunedGraph_); + } + if (enableBfloat16Sendrecv_ != false) { + output.writeBool(7, enableBfloat16Sendrecv_); + } + if (timelineStep_ != 0) { + output.writeInt32(8, timelineStep_); + } + if (buildCostModelAfter_ != 0L) { + output.writeInt64(9, buildCostModelAfter_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (enableRecvScheduling_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, enableRecvScheduling_); + } + if (optimizerOptions_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getOptimizerOptions()); + } + if (buildCostModel_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(4, buildCostModel_); + } + if (inferShapes_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, inferShapes_); + } + if (placePrunedGraph_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(6, placePrunedGraph_); + } + if (enableBfloat16Sendrecv_ != false) { + size += com.google.protobuf.CodedOutputStream + 
.computeBoolSize(7, enableBfloat16Sendrecv_); + } + if (timelineStep_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(8, timelineStep_); + } + if (buildCostModelAfter_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(9, buildCostModelAfter_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.GraphOptions)) { + return super.equals(obj); + } + org.tensorflow.framework.GraphOptions other = (org.tensorflow.framework.GraphOptions) obj; + + boolean result = true; + result = result && (getEnableRecvScheduling() + == other.getEnableRecvScheduling()); + result = result && (hasOptimizerOptions() == other.hasOptimizerOptions()); + if (hasOptimizerOptions()) { + result = result && getOptimizerOptions() + .equals(other.getOptimizerOptions()); + } + result = result && (getBuildCostModel() + == other.getBuildCostModel()); + result = result && (getBuildCostModelAfter() + == other.getBuildCostModelAfter()); + result = result && (getInferShapes() + == other.getInferShapes()); + result = result && (getPlacePrunedGraph() + == other.getPlacePrunedGraph()); + result = result && (getEnableBfloat16Sendrecv() + == other.getEnableBfloat16Sendrecv()); + result = result && (getTimelineStep() + == other.getTimelineStep()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + ENABLE_RECV_SCHEDULING_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnableRecvScheduling()); + if (hasOptimizerOptions()) { + hash = (37 * hash) + OPTIMIZER_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getOptimizerOptions().hashCode(); + } + hash 
= (37 * hash) + BUILD_COST_MODEL_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getBuildCostModel()); + hash = (37 * hash) + BUILD_COST_MODEL_AFTER_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getBuildCostModelAfter()); + hash = (37 * hash) + INFER_SHAPES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getInferShapes()); + hash = (37 * hash) + PLACE_PRUNED_GRAPH_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPlacePrunedGraph()); + hash = (37 * hash) + ENABLE_BFLOAT16_SENDRECV_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getEnableBfloat16Sendrecv()); + hash = (37 * hash) + TIMELINE_STEP_FIELD_NUMBER; + hash = (53 * hash) + getTimelineStep(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.GraphOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GraphOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GraphOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.GraphOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.GraphOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GraphOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GraphOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GraphOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.GraphOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.GraphOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.GraphOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.GraphOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.GraphOptions) + org.tensorflow.framework.GraphOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GraphOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GraphOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.GraphOptions.class, org.tensorflow.framework.GraphOptions.Builder.class); + } + + // Construct using org.tensorflow.framework.GraphOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + enableRecvScheduling_ = false; + + if (optimizerOptionsBuilder_ == null) { + optimizerOptions_ = null; + } else { + optimizerOptions_ = null; + optimizerOptionsBuilder_ = null; + } + buildCostModel_ = 0L; + + buildCostModelAfter_ = 0L; + + inferShapes_ = false; + + placePrunedGraph_ = false; + + enableBfloat16Sendrecv_ = false; + + timelineStep_ = 0; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_GraphOptions_descriptor; + } + + public org.tensorflow.framework.GraphOptions getDefaultInstanceForType() { + return org.tensorflow.framework.GraphOptions.getDefaultInstance(); + } + + public org.tensorflow.framework.GraphOptions build() { + org.tensorflow.framework.GraphOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.GraphOptions buildPartial() { + org.tensorflow.framework.GraphOptions result = new org.tensorflow.framework.GraphOptions(this); + result.enableRecvScheduling_ = enableRecvScheduling_; + if (optimizerOptionsBuilder_ == null) { + result.optimizerOptions_ = optimizerOptions_; + } else { + result.optimizerOptions_ = optimizerOptionsBuilder_.build(); + } + result.buildCostModel_ = buildCostModel_; + result.buildCostModelAfter_ = buildCostModelAfter_; + result.inferShapes_ = inferShapes_; + result.placePrunedGraph_ = placePrunedGraph_; + result.enableBfloat16Sendrecv_ = enableBfloat16Sendrecv_; + result.timelineStep_ = timelineStep_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.GraphOptions) { + return mergeFrom((org.tensorflow.framework.GraphOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.GraphOptions other) { + if (other == org.tensorflow.framework.GraphOptions.getDefaultInstance()) return this; + if (other.getEnableRecvScheduling() != false) { + setEnableRecvScheduling(other.getEnableRecvScheduling()); + } + if (other.hasOptimizerOptions()) { + mergeOptimizerOptions(other.getOptimizerOptions()); + } + if (other.getBuildCostModel() != 0L) { + setBuildCostModel(other.getBuildCostModel()); + } + if (other.getBuildCostModelAfter() != 0L) { + setBuildCostModelAfter(other.getBuildCostModelAfter()); + } + if (other.getInferShapes() != false) { + setInferShapes(other.getInferShapes()); + } + if (other.getPlacePrunedGraph() != false) { + setPlacePrunedGraph(other.getPlacePrunedGraph()); + } + if (other.getEnableBfloat16Sendrecv() != false) { + setEnableBfloat16Sendrecv(other.getEnableBfloat16Sendrecv()); + } + if (other.getTimelineStep() != 0) { + setTimelineStep(other.getTimelineStep()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.GraphOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.GraphOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private boolean enableRecvScheduling_ ; + 
/** + *
+     * If true, use control flow to schedule the activation of Recv nodes.
+     * (Currently ignored.)
+     * 
+ * + * optional bool enable_recv_scheduling = 2; + */ + public boolean getEnableRecvScheduling() { + return enableRecvScheduling_; + } + /** + *
+     * If true, use control flow to schedule the activation of Recv nodes.
+     * (Currently ignored.)
+     * 
+ * + * optional bool enable_recv_scheduling = 2; + */ + public Builder setEnableRecvScheduling(boolean value) { + + enableRecvScheduling_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, use control flow to schedule the activation of Recv nodes.
+     * (Currently ignored.)
+     * 
+ * + * optional bool enable_recv_scheduling = 2; + */ + public Builder clearEnableRecvScheduling() { + + enableRecvScheduling_ = false; + onChanged(); + return this; + } + + private org.tensorflow.framework.OptimizerOptions optimizerOptions_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OptimizerOptions, org.tensorflow.framework.OptimizerOptions.Builder, org.tensorflow.framework.OptimizerOptionsOrBuilder> optimizerOptionsBuilder_; + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public boolean hasOptimizerOptions() { + return optimizerOptionsBuilder_ != null || optimizerOptions_ != null; + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public org.tensorflow.framework.OptimizerOptions getOptimizerOptions() { + if (optimizerOptionsBuilder_ == null) { + return optimizerOptions_ == null ? org.tensorflow.framework.OptimizerOptions.getDefaultInstance() : optimizerOptions_; + } else { + return optimizerOptionsBuilder_.getMessage(); + } + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public Builder setOptimizerOptions(org.tensorflow.framework.OptimizerOptions value) { + if (optimizerOptionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + optimizerOptions_ = value; + onChanged(); + } else { + optimizerOptionsBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public Builder setOptimizerOptions( + org.tensorflow.framework.OptimizerOptions.Builder builderForValue) { + if (optimizerOptionsBuilder_ == null) { + optimizerOptions_ = builderForValue.build(); + onChanged(); + } else { + optimizerOptionsBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public Builder mergeOptimizerOptions(org.tensorflow.framework.OptimizerOptions value) { + if (optimizerOptionsBuilder_ == null) { + if (optimizerOptions_ != null) { + optimizerOptions_ = + org.tensorflow.framework.OptimizerOptions.newBuilder(optimizerOptions_).mergeFrom(value).buildPartial(); + } else { + optimizerOptions_ = value; + } + onChanged(); + } else { + optimizerOptionsBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public Builder clearOptimizerOptions() { + if (optimizerOptionsBuilder_ == null) { + optimizerOptions_ = null; + onChanged(); + } else { + optimizerOptions_ = null; + optimizerOptionsBuilder_ = null; + } + + return this; + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public org.tensorflow.framework.OptimizerOptions.Builder getOptimizerOptionsBuilder() { + + onChanged(); + return getOptimizerOptionsFieldBuilder().getBuilder(); + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + public org.tensorflow.framework.OptimizerOptionsOrBuilder getOptimizerOptionsOrBuilder() { + if (optimizerOptionsBuilder_ != null) { + return optimizerOptionsBuilder_.getMessageOrBuilder(); + } else { + return optimizerOptions_ == null ? + org.tensorflow.framework.OptimizerOptions.getDefaultInstance() : optimizerOptions_; + } + } + /** + *
+     * Options controlling how graph is optimized.
+     * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OptimizerOptions, org.tensorflow.framework.OptimizerOptions.Builder, org.tensorflow.framework.OptimizerOptionsOrBuilder> + getOptimizerOptionsFieldBuilder() { + if (optimizerOptionsBuilder_ == null) { + optimizerOptionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OptimizerOptions, org.tensorflow.framework.OptimizerOptions.Builder, org.tensorflow.framework.OptimizerOptionsOrBuilder>( + getOptimizerOptions(), + getParentForChildren(), + isClean()); + optimizerOptions_ = null; + } + return optimizerOptionsBuilder_; + } + + private long buildCostModel_ ; + /** + *
+     * The number of steps to run before returning a cost model detailing
+     * the memory usage and performance of each node of the graph. 0 means
+     * no cost model.
+     * 
+ * + * optional int64 build_cost_model = 4; + */ + public long getBuildCostModel() { + return buildCostModel_; + } + /** + *
+     * The number of steps to run before returning a cost model detailing
+     * the memory usage and performance of each node of the graph. 0 means
+     * no cost model.
+     * 
+ * + * optional int64 build_cost_model = 4; + */ + public Builder setBuildCostModel(long value) { + + buildCostModel_ = value; + onChanged(); + return this; + } + /** + *
+     * The number of steps to run before returning a cost model detailing
+     * the memory usage and performance of each node of the graph. 0 means
+     * no cost model.
+     * 
+ * + * optional int64 build_cost_model = 4; + */ + public Builder clearBuildCostModel() { + + buildCostModel_ = 0L; + onChanged(); + return this; + } + + private long buildCostModelAfter_ ; + /** + *
+     * The number of steps to skip before collecting statistics for the
+     * cost model.
+     * 
+ * + * optional int64 build_cost_model_after = 9; + */ + public long getBuildCostModelAfter() { + return buildCostModelAfter_; + } + /** + *
+     * The number of steps to skip before collecting statistics for the
+     * cost model.
+     * 
+ * + * optional int64 build_cost_model_after = 9; + */ + public Builder setBuildCostModelAfter(long value) { + + buildCostModelAfter_ = value; + onChanged(); + return this; + } + /** + *
+     * The number of steps to skip before collecting statistics for the
+     * cost model.
+     * 
+ * + * optional int64 build_cost_model_after = 9; + */ + public Builder clearBuildCostModelAfter() { + + buildCostModelAfter_ = 0L; + onChanged(); + return this; + } + + private boolean inferShapes_ ; + /** + *
+     * Annotate each Node with Op output shape data, to the extent it can
+     * be statically inferred.
+     * 
+ * + * optional bool infer_shapes = 5; + */ + public boolean getInferShapes() { + return inferShapes_; + } + /** + *
+     * Annotate each Node with Op output shape data, to the extent it can
+     * be statically inferred.
+     * 
+ * + * optional bool infer_shapes = 5; + */ + public Builder setInferShapes(boolean value) { + + inferShapes_ = value; + onChanged(); + return this; + } + /** + *
+     * Annotate each Node with Op output shape data, to the extent it can
+     * be statically inferred.
+     * 
+ * + * optional bool infer_shapes = 5; + */ + public Builder clearInferShapes() { + + inferShapes_ = false; + onChanged(); + return this; + } + + private boolean placePrunedGraph_ ; + /** + *
+     * Only place the subgraphs that are run, rather than the entire graph.
+     * This is useful for interactive graph building, where one might
+     * produce graphs that cannot be placed during the debugging
+     * process.  In particular, it allows the client to continue work in
+     * a session after adding a node to a graph whose placement
+     * constraints are unsatisfiable.
+     * 
+ * + * optional bool place_pruned_graph = 6; + */ + public boolean getPlacePrunedGraph() { + return placePrunedGraph_; + } + /** + *
+     * Only place the subgraphs that are run, rather than the entire graph.
+     * This is useful for interactive graph building, where one might
+     * produce graphs that cannot be placed during the debugging
+     * process.  In particular, it allows the client to continue work in
+     * a session after adding a node to a graph whose placement
+     * constraints are unsatisfiable.
+     * 
+ * + * optional bool place_pruned_graph = 6; + */ + public Builder setPlacePrunedGraph(boolean value) { + + placePrunedGraph_ = value; + onChanged(); + return this; + } + /** + *
+     * Only place the subgraphs that are run, rather than the entire graph.
+     * This is useful for interactive graph building, where one might
+     * produce graphs that cannot be placed during the debugging
+     * process.  In particular, it allows the client to continue work in
+     * a session after adding a node to a graph whose placement
+     * constraints are unsatisfiable.
+     * 
+ * + * optional bool place_pruned_graph = 6; + */ + public Builder clearPlacePrunedGraph() { + + placePrunedGraph_ = false; + onChanged(); + return this; + } + + private boolean enableBfloat16Sendrecv_ ; + /** + *
+     * If true, transfer float values between processes as bfloat16.
+     * 
+ * + * optional bool enable_bfloat16_sendrecv = 7; + */ + public boolean getEnableBfloat16Sendrecv() { + return enableBfloat16Sendrecv_; + } + /** + *
+     * If true, transfer float values between processes as bfloat16.
+     * 
+ * + * optional bool enable_bfloat16_sendrecv = 7; + */ + public Builder setEnableBfloat16Sendrecv(boolean value) { + + enableBfloat16Sendrecv_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, transfer float values between processes as bfloat16.
+     * 
+ * + * optional bool enable_bfloat16_sendrecv = 7; + */ + public Builder clearEnableBfloat16Sendrecv() { + + enableBfloat16Sendrecv_ = false; + onChanged(); + return this; + } + + private int timelineStep_ ; + /** + *
+     * If > 0, record a timeline every this many steps.
+     * EXPERIMENTAL: This currently has no effect in MasterSession.
+     * 
+ * + * optional int32 timeline_step = 8; + */ + public int getTimelineStep() { + return timelineStep_; + } + /** + *
+     * If > 0, record a timeline every this many steps.
+     * EXPERIMENTAL: This currently has no effect in MasterSession.
+     * 
+ * + * optional int32 timeline_step = 8; + */ + public Builder setTimelineStep(int value) { + + timelineStep_ = value; + onChanged(); + return this; + } + /** + *
+     * If > 0, record a timeline every this many steps.
+     * EXPERIMENTAL: This currently has no effect in MasterSession.
+     * 
+ * + * optional int32 timeline_step = 8; + */ + public Builder clearTimelineStep() { + + timelineStep_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.GraphOptions) + } + + // @@protoc_insertion_point(class_scope:tensorflow.GraphOptions) + private static final org.tensorflow.framework.GraphOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.GraphOptions(); + } + + public static org.tensorflow.framework.GraphOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public GraphOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GraphOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.GraphOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptionsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptionsOrBuilder.java new file mode 100644 index 0000000000000..bf23f443e87f5 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphOptionsOrBuilder.java @@ -0,0 +1,108 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface GraphOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.GraphOptions) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * If true, use control flow to schedule the activation of Recv nodes.
+   * (Currently ignored.)
+   * 
+ * + * optional bool enable_recv_scheduling = 2; + */ + boolean getEnableRecvScheduling(); + + /** + *
+   * Options controlling how graph is optimized.
+   * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + boolean hasOptimizerOptions(); + /** + *
+   * Options controlling how graph is optimized.
+   * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + org.tensorflow.framework.OptimizerOptions getOptimizerOptions(); + /** + *
+   * Options controlling how graph is optimized.
+   * 
+ * + * optional .tensorflow.OptimizerOptions optimizer_options = 3; + */ + org.tensorflow.framework.OptimizerOptionsOrBuilder getOptimizerOptionsOrBuilder(); + + /** + *
+   * The number of steps to run before returning a cost model detailing
+   * the memory usage and performance of each node of the graph. 0 means
+   * no cost model.
+   * 
+ * + * optional int64 build_cost_model = 4; + */ + long getBuildCostModel(); + + /** + *
+   * The number of steps to skip before collecting statistics for the
+   * cost model.
+   * 
+ * + * optional int64 build_cost_model_after = 9; + */ + long getBuildCostModelAfter(); + + /** + *
+   * Annotate each Node with Op output shape data, to the extent it can
+   * be statically inferred.
+   * 
+ * + * optional bool infer_shapes = 5; + */ + boolean getInferShapes(); + + /** + *
+   * Only place the subgraphs that are run, rather than the entire graph.
+   * This is useful for interactive graph building, where one might
+   * produce graphs that cannot be placed during the debugging
+   * process.  In particular, it allows the client to continue work in
+   * a session after adding a node to a graph whose placement
+   * constraints are unsatisfiable.
+   * 
+ * + * optional bool place_pruned_graph = 6; + */ + boolean getPlacePrunedGraph(); + + /** + *
+   * If true, transfer float values between processes as bfloat16.
+   * 
+ * + * optional bool enable_bfloat16_sendrecv = 7; + */ + boolean getEnableBfloat16Sendrecv(); + + /** + *
+   * If > 0, record a timeline every this many steps.
+   * EXPERIMENTAL: This currently has no effect in MasterSession.
+   * 
+ * + * optional int32 timeline_step = 8; + */ + int getTimelineStep(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphProtos.java new file mode 100644 index 0000000000000..42fd464248fb2 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/GraphProtos.java @@ -0,0 +1,69 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/graph.proto + +package org.tensorflow.framework; + +public final class GraphProtos { + private GraphProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_GraphDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_GraphDef_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n%tensorflow/core/framework/graph.proto\022" + + "\ntensorflow\032(tensorflow/core/framework/n" + + "ode_def.proto\032(tensorflow/core/framework" + + "/function.proto\032(tensorflow/core/framewo" + + "rk/versions.proto\"\235\001\n\010GraphDef\022!\n\004node\030\001" + + " \003(\0132\023.tensorflow.NodeDef\022(\n\010versions\030\004 " + + "\001(\0132\026.tensorflow.VersionDef\022\023\n\007version\030\003" + + " \001(\005B\002\030\001\022/\n\007library\030\002 \001(\0132\036.tensorflow.F" + + 
"unctionDefLibraryB,\n\030org.tensorflow.fram" + + "eworkB\013GraphProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.NodeProto.getDescriptor(), + org.tensorflow.framework.FunctionProtos.getDescriptor(), + org.tensorflow.framework.VersionsProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_GraphDef_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_GraphDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_GraphDef_descriptor, + new java.lang.String[] { "Node", "Versions", "Version", "Library", }); + org.tensorflow.framework.NodeProto.getDescriptor(); + org.tensorflow.framework.FunctionProtos.getDescriptor(); + org.tensorflow.framework.VersionsProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProto.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProto.java new file mode 100644 index 0000000000000..87f284d9bbbac --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProto.java @@ -0,0 +1,1078 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/summary.proto + +package org.tensorflow.framework; + +/** + *
+ * Serialization format for histogram module in
+ * core/lib/histogram/histogram.h
+ * 
+ * + * Protobuf type {@code tensorflow.HistogramProto} + */ +public final class HistogramProto extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.HistogramProto) + HistogramProtoOrBuilder { + // Use HistogramProto.newBuilder() to construct. + private HistogramProto(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private HistogramProto() { + min_ = 0D; + max_ = 0D; + num_ = 0D; + sum_ = 0D; + sumSquares_ = 0D; + bucketLimit_ = java.util.Collections.emptyList(); + bucket_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private HistogramProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 9: { + + min_ = input.readDouble(); + break; + } + case 17: { + + max_ = input.readDouble(); + break; + } + case 25: { + + num_ = input.readDouble(); + break; + } + case 33: { + + sum_ = input.readDouble(); + break; + } + case 41: { + + sumSquares_ = input.readDouble(); + break; + } + case 49: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + bucketLimit_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; + } + bucketLimit_.add(input.readDouble()); + break; + } + case 50: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020) && input.getBytesUntilLimit() > 0) { + bucketLimit_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; 
+ } + while (input.getBytesUntilLimit() > 0) { + bucketLimit_.add(input.readDouble()); + } + input.popLimit(limit); + break; + } + case 57: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + bucket_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + bucket_.add(input.readDouble()); + break; + } + case 58: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) { + bucket_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + while (input.getBytesUntilLimit() > 0) { + bucket_.add(input.readDouble()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + bucketLimit_ = java.util.Collections.unmodifiableList(bucketLimit_); + } + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + bucket_ = java.util.Collections.unmodifiableList(bucket_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_HistogramProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_HistogramProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.HistogramProto.class, org.tensorflow.framework.HistogramProto.Builder.class); + } + + private int bitField0_; + public static final int MIN_FIELD_NUMBER = 1; + private double min_; + /** + * optional double min = 1; + */ + public double getMin() { + return 
min_; + } + + public static final int MAX_FIELD_NUMBER = 2; + private double max_; + /** + * optional double max = 2; + */ + public double getMax() { + return max_; + } + + public static final int NUM_FIELD_NUMBER = 3; + private double num_; + /** + * optional double num = 3; + */ + public double getNum() { + return num_; + } + + public static final int SUM_FIELD_NUMBER = 4; + private double sum_; + /** + * optional double sum = 4; + */ + public double getSum() { + return sum_; + } + + public static final int SUM_SQUARES_FIELD_NUMBER = 5; + private double sumSquares_; + /** + * optional double sum_squares = 5; + */ + public double getSumSquares() { + return sumSquares_; + } + + public static final int BUCKET_LIMIT_FIELD_NUMBER = 6; + private java.util.List bucketLimit_; + /** + *
+   * Parallel arrays encoding the bucket boundaries and the bucket values.
+   * bucket(i) is the count for the bucket i.  The range for
+   * a bucket is:
+   *   i == 0:  -DBL_MAX .. bucket_limit(0)
+   *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+   * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public java.util.List + getBucketLimitList() { + return bucketLimit_; + } + /** + *
+   * Parallel arrays encoding the bucket boundaries and the bucket values.
+   * bucket(i) is the count for the bucket i.  The range for
+   * a bucket is:
+   *   i == 0:  -DBL_MAX .. bucket_limit(0)
+   *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+   * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public int getBucketLimitCount() { + return bucketLimit_.size(); + } + /** + *
+   * Parallel arrays encoding the bucket boundaries and the bucket values.
+   * bucket(i) is the count for the bucket i.  The range for
+   * a bucket is:
+   *   i == 0:  -DBL_MAX .. bucket_limit(0)
+   *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+   * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public double getBucketLimit(int index) { + return bucketLimit_.get(index); + } + private int bucketLimitMemoizedSerializedSize = -1; + + public static final int BUCKET_FIELD_NUMBER = 7; + private java.util.List bucket_; + /** + * repeated double bucket = 7 [packed = true]; + */ + public java.util.List + getBucketList() { + return bucket_; + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public int getBucketCount() { + return bucket_.size(); + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public double getBucket(int index) { + return bucket_.get(index); + } + private int bucketMemoizedSerializedSize = -1; + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (min_ != 0D) { + output.writeDouble(1, min_); + } + if (max_ != 0D) { + output.writeDouble(2, max_); + } + if (num_ != 0D) { + output.writeDouble(3, num_); + } + if (sum_ != 0D) { + output.writeDouble(4, sum_); + } + if (sumSquares_ != 0D) { + output.writeDouble(5, sumSquares_); + } + if (getBucketLimitList().size() > 0) { + output.writeUInt32NoTag(50); + output.writeUInt32NoTag(bucketLimitMemoizedSerializedSize); + } + for (int i = 0; i < bucketLimit_.size(); i++) { + output.writeDoubleNoTag(bucketLimit_.get(i)); + } + if (getBucketList().size() > 0) { + output.writeUInt32NoTag(58); + output.writeUInt32NoTag(bucketMemoizedSerializedSize); + } + for (int i = 0; i < bucket_.size(); i++) { + output.writeDoubleNoTag(bucket_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (min_ != 0D) { + size += 
com.google.protobuf.CodedOutputStream + .computeDoubleSize(1, min_); + } + if (max_ != 0D) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(2, max_); + } + if (num_ != 0D) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(3, num_); + } + if (sum_ != 0D) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(4, sum_); + } + if (sumSquares_ != 0D) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(5, sumSquares_); + } + { + int dataSize = 0; + dataSize = 8 * getBucketLimitList().size(); + size += dataSize; + if (!getBucketLimitList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + bucketLimitMemoizedSerializedSize = dataSize; + } + { + int dataSize = 0; + dataSize = 8 * getBucketList().size(); + size += dataSize; + if (!getBucketList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + bucketMemoizedSerializedSize = dataSize; + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.HistogramProto)) { + return super.equals(obj); + } + org.tensorflow.framework.HistogramProto other = (org.tensorflow.framework.HistogramProto) obj; + + boolean result = true; + result = result && ( + java.lang.Double.doubleToLongBits(getMin()) + == java.lang.Double.doubleToLongBits( + other.getMin())); + result = result && ( + java.lang.Double.doubleToLongBits(getMax()) + == java.lang.Double.doubleToLongBits( + other.getMax())); + result = result && ( + java.lang.Double.doubleToLongBits(getNum()) + == java.lang.Double.doubleToLongBits( + other.getNum())); + result = result && ( + java.lang.Double.doubleToLongBits(getSum()) + == java.lang.Double.doubleToLongBits( + 
other.getSum())); + result = result && ( + java.lang.Double.doubleToLongBits(getSumSquares()) + == java.lang.Double.doubleToLongBits( + other.getSumSquares())); + result = result && getBucketLimitList() + .equals(other.getBucketLimitList()); + result = result && getBucketList() + .equals(other.getBucketList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + MIN_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getMin())); + hash = (37 * hash) + MAX_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getMax())); + hash = (37 * hash) + NUM_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getNum())); + hash = (37 * hash) + SUM_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getSum())); + hash = (37 * hash) + SUM_SQUARES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getSumSquares())); + if (getBucketLimitCount() > 0) { + hash = (37 * hash) + BUCKET_LIMIT_FIELD_NUMBER; + hash = (53 * hash) + getBucketLimitList().hashCode(); + } + if (getBucketCount() > 0) { + hash = (37 * hash) + BUCKET_FIELD_NUMBER; + hash = (53 * hash) + getBucketList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.HistogramProto parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.HistogramProto parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.HistogramProto parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.HistogramProto parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.HistogramProto parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.HistogramProto parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.HistogramProto parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.HistogramProto parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.HistogramProto parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.HistogramProto parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.HistogramProto prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Serialization format for histogram module in
+   * core/lib/histogram/histogram.h
+   * 
+ * + * Protobuf type {@code tensorflow.HistogramProto} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.HistogramProto) + org.tensorflow.framework.HistogramProtoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_HistogramProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_HistogramProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.HistogramProto.class, org.tensorflow.framework.HistogramProto.Builder.class); + } + + // Construct using org.tensorflow.framework.HistogramProto.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + min_ = 0D; + + max_ = 0D; + + num_ = 0D; + + sum_ = 0D; + + sumSquares_ = 0D; + + bucketLimit_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + bucket_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_HistogramProto_descriptor; + } + + public org.tensorflow.framework.HistogramProto getDefaultInstanceForType() { + return org.tensorflow.framework.HistogramProto.getDefaultInstance(); + } + + public org.tensorflow.framework.HistogramProto build() { + 
org.tensorflow.framework.HistogramProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.HistogramProto buildPartial() { + org.tensorflow.framework.HistogramProto result = new org.tensorflow.framework.HistogramProto(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.min_ = min_; + result.max_ = max_; + result.num_ = num_; + result.sum_ = sum_; + result.sumSquares_ = sumSquares_; + if (((bitField0_ & 0x00000020) == 0x00000020)) { + bucketLimit_ = java.util.Collections.unmodifiableList(bucketLimit_); + bitField0_ = (bitField0_ & ~0x00000020); + } + result.bucketLimit_ = bucketLimit_; + if (((bitField0_ & 0x00000040) == 0x00000040)) { + bucket_ = java.util.Collections.unmodifiableList(bucket_); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.bucket_ = bucket_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.HistogramProto) { + return 
mergeFrom((org.tensorflow.framework.HistogramProto)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.HistogramProto other) { + if (other == org.tensorflow.framework.HistogramProto.getDefaultInstance()) return this; + if (other.getMin() != 0D) { + setMin(other.getMin()); + } + if (other.getMax() != 0D) { + setMax(other.getMax()); + } + if (other.getNum() != 0D) { + setNum(other.getNum()); + } + if (other.getSum() != 0D) { + setSum(other.getSum()); + } + if (other.getSumSquares() != 0D) { + setSumSquares(other.getSumSquares()); + } + if (!other.bucketLimit_.isEmpty()) { + if (bucketLimit_.isEmpty()) { + bucketLimit_ = other.bucketLimit_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureBucketLimitIsMutable(); + bucketLimit_.addAll(other.bucketLimit_); + } + onChanged(); + } + if (!other.bucket_.isEmpty()) { + if (bucket_.isEmpty()) { + bucket_ = other.bucket_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureBucketIsMutable(); + bucket_.addAll(other.bucket_); + } + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.HistogramProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.HistogramProto) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private double min_ ; + /** + * optional double min = 1; + */ + public double getMin() { + return min_; + } + /** + * optional double min = 1; + */ + public Builder setMin(double value) 
{ + + min_ = value; + onChanged(); + return this; + } + /** + * optional double min = 1; + */ + public Builder clearMin() { + + min_ = 0D; + onChanged(); + return this; + } + + private double max_ ; + /** + * optional double max = 2; + */ + public double getMax() { + return max_; + } + /** + * optional double max = 2; + */ + public Builder setMax(double value) { + + max_ = value; + onChanged(); + return this; + } + /** + * optional double max = 2; + */ + public Builder clearMax() { + + max_ = 0D; + onChanged(); + return this; + } + + private double num_ ; + /** + * optional double num = 3; + */ + public double getNum() { + return num_; + } + /** + * optional double num = 3; + */ + public Builder setNum(double value) { + + num_ = value; + onChanged(); + return this; + } + /** + * optional double num = 3; + */ + public Builder clearNum() { + + num_ = 0D; + onChanged(); + return this; + } + + private double sum_ ; + /** + * optional double sum = 4; + */ + public double getSum() { + return sum_; + } + /** + * optional double sum = 4; + */ + public Builder setSum(double value) { + + sum_ = value; + onChanged(); + return this; + } + /** + * optional double sum = 4; + */ + public Builder clearSum() { + + sum_ = 0D; + onChanged(); + return this; + } + + private double sumSquares_ ; + /** + * optional double sum_squares = 5; + */ + public double getSumSquares() { + return sumSquares_; + } + /** + * optional double sum_squares = 5; + */ + public Builder setSumSquares(double value) { + + sumSquares_ = value; + onChanged(); + return this; + } + /** + * optional double sum_squares = 5; + */ + public Builder clearSumSquares() { + + sumSquares_ = 0D; + onChanged(); + return this; + } + + private java.util.List bucketLimit_ = java.util.Collections.emptyList(); + private void ensureBucketLimitIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + bucketLimit_ = new java.util.ArrayList(bucketLimit_); + bitField0_ |= 0x00000020; + } + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public java.util.List + getBucketLimitList() { + return java.util.Collections.unmodifiableList(bucketLimit_); + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public int getBucketLimitCount() { + return bucketLimit_.size(); + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public double getBucketLimit(int index) { + return bucketLimit_.get(index); + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public Builder setBucketLimit( + int index, double value) { + ensureBucketLimitIsMutable(); + bucketLimit_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public Builder addBucketLimit(double value) { + ensureBucketLimitIsMutable(); + bucketLimit_.add(value); + onChanged(); + return this; + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public Builder addAllBucketLimit( + java.lang.Iterable values) { + ensureBucketLimitIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, bucketLimit_); + onChanged(); + return this; + } + /** + *
+     * Parallel arrays encoding the bucket boundaries and the bucket values.
+     * bucket(i) is the count for the bucket i.  The range for
+     * a bucket is:
+     *   i == 0:  -DBL_MAX .. bucket_limit(0)
+     *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+     * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + public Builder clearBucketLimit() { + bucketLimit_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + return this; + } + + private java.util.List bucket_ = java.util.Collections.emptyList(); + private void ensureBucketIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + bucket_ = new java.util.ArrayList(bucket_); + bitField0_ |= 0x00000040; + } + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public java.util.List + getBucketList() { + return java.util.Collections.unmodifiableList(bucket_); + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public int getBucketCount() { + return bucket_.size(); + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public double getBucket(int index) { + return bucket_.get(index); + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public Builder setBucket( + int index, double value) { + ensureBucketIsMutable(); + bucket_.set(index, value); + onChanged(); + return this; + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public Builder addBucket(double value) { + ensureBucketIsMutable(); + bucket_.add(value); + onChanged(); + return this; + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public Builder addAllBucket( + java.lang.Iterable values) { + ensureBucketIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, bucket_); + onChanged(); + return this; + } + /** + * repeated double bucket = 7 [packed = true]; + */ + public Builder clearBucket() { + bucket_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.HistogramProto) + } + + // @@protoc_insertion_point(class_scope:tensorflow.HistogramProto) + private static final org.tensorflow.framework.HistogramProto DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.HistogramProto(); + } + + public static org.tensorflow.framework.HistogramProto getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public HistogramProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new HistogramProto(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.HistogramProto getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProtoOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProtoOrBuilder.java new file mode 100644 index 0000000000000..6cfe60d6d035f --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/HistogramProtoOrBuilder.java @@ -0,0 +1,84 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/summary.proto + +package org.tensorflow.framework; + +public interface HistogramProtoOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.HistogramProto) + com.google.protobuf.MessageOrBuilder { + + /** + * optional double min = 1; + */ + double getMin(); + + /** + * optional double max = 2; + */ + double getMax(); + + /** + * optional double num = 3; + */ + double getNum(); + + /** + * optional double sum = 4; + */ + double getSum(); + + /** + * optional double sum_squares = 5; + */ + double getSumSquares(); + + /** + *
+   * Parallel arrays encoding the bucket boundaries and the bucket values.
+   * bucket(i) is the count for the bucket i.  The range for
+   * a bucket is:
+   *   i == 0:  -DBL_MAX .. bucket_limit(0)
+   *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+   * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + java.util.List getBucketLimitList(); + /** + *
+   * Parallel arrays encoding the bucket boundaries and the bucket values.
+   * bucket(i) is the count for the bucket i.  The range for
+   * a bucket is:
+   *   i == 0:  -DBL_MAX .. bucket_limit(0)
+   *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+   * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + int getBucketLimitCount(); + /** + *
+   * Parallel arrays encoding the bucket boundaries and the bucket values.
+   * bucket(i) is the count for the bucket i.  The range for
+   * a bucket is:
+   *   i == 0:  -DBL_MAX .. bucket_limit(0)
+   *   i != 0:  bucket_limit(i-1) .. bucket_limit(i)
+   * 
+ * + * repeated double bucket_limit = 6 [packed = true]; + */ + double getBucketLimit(int index); + + /** + * repeated double bucket = 7 [packed = true]; + */ + java.util.List getBucketList(); + /** + * repeated double bucket = 7 [packed = true]; + */ + int getBucketCount(); + /** + * repeated double bucket = 7 [packed = true]; + */ + double getBucket(int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDef.java new file mode 100644 index 0000000000000..f19c286b8faaf --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDef.java @@ -0,0 +1,2237 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/kernel_def.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.KernelDef} + */ +public final class KernelDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.KernelDef) + KernelDefOrBuilder { + // Use KernelDef.newBuilder() to construct. 
+ private KernelDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private KernelDef() { + op_ = ""; + deviceType_ = ""; + constraint_ = java.util.Collections.emptyList(); + hostMemoryArg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + label_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private KernelDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + op_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + deviceType_ = s; + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + constraint_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + constraint_.add( + input.readMessage(org.tensorflow.framework.KernelDef.AttrConstraint.parser(), extensionRegistry)); + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + hostMemoryArg_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000008; + } + hostMemoryArg_.add(s); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + label_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } 
finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + constraint_ = java.util.Collections.unmodifiableList(constraint_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + hostMemoryArg_ = hostMemoryArg_.getUnmodifiableView(); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.KernelDef.class, org.tensorflow.framework.KernelDef.Builder.class); + } + + public interface AttrConstraintOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.KernelDef.AttrConstraint) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Name of an attr from the Op.
+     * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+     * Name of an attr from the Op.
+     * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * A list of values that this kernel supports for this attr.
+     * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + boolean hasAllowedValues(); + /** + *
+     * A list of values that this kernel supports for this attr.
+     * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + org.tensorflow.framework.AttrValue getAllowedValues(); + /** + *
+     * A list of values that this kernel supports for this attr.
+     * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + org.tensorflow.framework.AttrValueOrBuilder getAllowedValuesOrBuilder(); + } + /** + * Protobuf type {@code tensorflow.KernelDef.AttrConstraint} + */ + public static final class AttrConstraint extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.KernelDef.AttrConstraint) + AttrConstraintOrBuilder { + // Use AttrConstraint.newBuilder() to construct. + private AttrConstraint(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AttrConstraint() { + name_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private AttrConstraint( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + org.tensorflow.framework.AttrValue.Builder subBuilder = null; + if (allowedValues_ != null) { + subBuilder = allowedValues_.toBuilder(); + } + allowedValues_ = input.readMessage(org.tensorflow.framework.AttrValue.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(allowedValues_); + allowedValues_ = subBuilder.buildPartial(); + } + + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + 
makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_AttrConstraint_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_AttrConstraint_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.KernelDef.AttrConstraint.class, org.tensorflow.framework.KernelDef.AttrConstraint.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+     * Name of an attr from the Op.
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * Name of an attr from the Op.
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ALLOWED_VALUES_FIELD_NUMBER = 2; + private org.tensorflow.framework.AttrValue allowedValues_; + /** + *
+     * A list of values that this kernel supports for this attr.
+     * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public boolean hasAllowedValues() { + return allowedValues_ != null; + } + /** + *
+     * A list of values that this kernel supports for this attr.
+     * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public org.tensorflow.framework.AttrValue getAllowedValues() { + return allowedValues_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : allowedValues_; + } + /** + *
+     * A list of values that this kernel supports for this attr.
+     * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public org.tensorflow.framework.AttrValueOrBuilder getAllowedValuesOrBuilder() { + return getAllowedValues(); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (allowedValues_ != null) { + output.writeMessage(2, getAllowedValues()); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (allowedValues_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getAllowedValues()); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.KernelDef.AttrConstraint)) { + return super.equals(obj); + } + org.tensorflow.framework.KernelDef.AttrConstraint other = (org.tensorflow.framework.KernelDef.AttrConstraint) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && (hasAllowedValues() == other.hasAllowedValues()); + if (hasAllowedValues()) { + result = result && getAllowedValues() + .equals(other.getAllowedValues()); + } + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasAllowedValues()) { + hash = (37 * hash) + ALLOWED_VALUES_FIELD_NUMBER; + hash = (53 * hash) + getAllowedValues().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.KernelDef.AttrConstraint parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.KernelDef.AttrConstraint prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.KernelDef.AttrConstraint} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.KernelDef.AttrConstraint) + org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_AttrConstraint_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_AttrConstraint_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.KernelDef.AttrConstraint.class, org.tensorflow.framework.KernelDef.AttrConstraint.Builder.class); + } + + // Construct using org.tensorflow.framework.KernelDef.AttrConstraint.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + if (allowedValuesBuilder_ == null) { + allowedValues_ = null; + } else { + allowedValues_ = null; + allowedValuesBuilder_ = null; + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_AttrConstraint_descriptor; + } + + public org.tensorflow.framework.KernelDef.AttrConstraint getDefaultInstanceForType() { + return org.tensorflow.framework.KernelDef.AttrConstraint.getDefaultInstance(); + } + + public org.tensorflow.framework.KernelDef.AttrConstraint build() { + org.tensorflow.framework.KernelDef.AttrConstraint result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.KernelDef.AttrConstraint buildPartial() { + org.tensorflow.framework.KernelDef.AttrConstraint result = new org.tensorflow.framework.KernelDef.AttrConstraint(this); + result.name_ = name_; + if (allowedValuesBuilder_ == null) { + result.allowedValues_ = allowedValues_; + } else { + result.allowedValues_ = allowedValuesBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.KernelDef.AttrConstraint) { + return 
mergeFrom((org.tensorflow.framework.KernelDef.AttrConstraint)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.KernelDef.AttrConstraint other) { + if (other == org.tensorflow.framework.KernelDef.AttrConstraint.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.hasAllowedValues()) { + mergeAllowedValues(other.getAllowedValues()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.KernelDef.AttrConstraint parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.KernelDef.AttrConstraint) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+       * Name of an attr from the Op.
+       * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Name of an attr from the Op.
+       * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Name of an attr from the Op.
+       * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+       * Name of an attr from the Op.
+       * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+       * Name of an attr from the Op.
+       * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private org.tensorflow.framework.AttrValue allowedValues_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder> allowedValuesBuilder_; + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public boolean hasAllowedValues() { + return allowedValuesBuilder_ != null || allowedValues_ != null; + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public org.tensorflow.framework.AttrValue getAllowedValues() { + if (allowedValuesBuilder_ == null) { + return allowedValues_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : allowedValues_; + } else { + return allowedValuesBuilder_.getMessage(); + } + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public Builder setAllowedValues(org.tensorflow.framework.AttrValue value) { + if (allowedValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + allowedValues_ = value; + onChanged(); + } else { + allowedValuesBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public Builder setAllowedValues( + org.tensorflow.framework.AttrValue.Builder builderForValue) { + if (allowedValuesBuilder_ == null) { + allowedValues_ = builderForValue.build(); + onChanged(); + } else { + allowedValuesBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public Builder mergeAllowedValues(org.tensorflow.framework.AttrValue value) { + if (allowedValuesBuilder_ == null) { + if (allowedValues_ != null) { + allowedValues_ = + org.tensorflow.framework.AttrValue.newBuilder(allowedValues_).mergeFrom(value).buildPartial(); + } else { + allowedValues_ = value; + } + onChanged(); + } else { + allowedValuesBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public Builder clearAllowedValues() { + if (allowedValuesBuilder_ == null) { + allowedValues_ = null; + onChanged(); + } else { + allowedValues_ = null; + allowedValuesBuilder_ = null; + } + + return this; + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public org.tensorflow.framework.AttrValue.Builder getAllowedValuesBuilder() { + + onChanged(); + return getAllowedValuesFieldBuilder().getBuilder(); + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + public org.tensorflow.framework.AttrValueOrBuilder getAllowedValuesOrBuilder() { + if (allowedValuesBuilder_ != null) { + return allowedValuesBuilder_.getMessageOrBuilder(); + } else { + return allowedValues_ == null ? + org.tensorflow.framework.AttrValue.getDefaultInstance() : allowedValues_; + } + } + /** + *
+       * A list of values that this kernel supports for this attr.
+       * Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder> + getAllowedValuesFieldBuilder() { + if (allowedValuesBuilder_ == null) { + allowedValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder>( + getAllowedValues(), + getParentForChildren(), + isClean()); + allowedValues_ = null; + } + return allowedValuesBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.KernelDef.AttrConstraint) + } + + // @@protoc_insertion_point(class_scope:tensorflow.KernelDef.AttrConstraint) + private static final org.tensorflow.framework.KernelDef.AttrConstraint DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.KernelDef.AttrConstraint(); + } + + public static org.tensorflow.framework.KernelDef.AttrConstraint getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AttrConstraint parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AttrConstraint(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.KernelDef.AttrConstraint 
getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int bitField0_; + public static final int OP_FIELD_NUMBER = 1; + private volatile java.lang.Object op_; + /** + *
+   * Must match the name of an Op.
+   * 
+ * + * optional string op = 1; + */ + public java.lang.String getOp() { + java.lang.Object ref = op_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + op_ = s; + return s; + } + } + /** + *
+   * Must match the name of an Op.
+   * 
+ * + * optional string op = 1; + */ + public com.google.protobuf.ByteString + getOpBytes() { + java.lang.Object ref = op_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + op_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DEVICE_TYPE_FIELD_NUMBER = 2; + private volatile java.lang.Object deviceType_; + /** + *
+   * Type of device this kernel runs on.
+   * 
+ * + * optional string device_type = 2; + */ + public java.lang.String getDeviceType() { + java.lang.Object ref = deviceType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + deviceType_ = s; + return s; + } + } + /** + *
+   * Type of device this kernel runs on.
+   * 
+ * + * optional string device_type = 2; + */ + public com.google.protobuf.ByteString + getDeviceTypeBytes() { + java.lang.Object ref = deviceType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + deviceType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CONSTRAINT_FIELD_NUMBER = 3; + private java.util.List constraint_; + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public java.util.List getConstraintList() { + return constraint_; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public java.util.List + getConstraintOrBuilderList() { + return constraint_; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public int getConstraintCount() { + return constraint_.size(); + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraint getConstraint(int index) { + return constraint_.get(index); + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder getConstraintOrBuilder( + int index) { + return constraint_.get(index); + } + + public static final int HOST_MEMORY_ARG_FIELD_NUMBER = 4; + private com.google.protobuf.LazyStringList hostMemoryArg_; + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + public com.google.protobuf.ProtocolStringList + getHostMemoryArgList() { + return hostMemoryArg_; + } + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + public int getHostMemoryArgCount() { + return hostMemoryArg_.size(); + } + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + public java.lang.String getHostMemoryArg(int index) { + return hostMemoryArg_.get(index); + } + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + public com.google.protobuf.ByteString + getHostMemoryArgBytes(int index) { + return hostMemoryArg_.getByteString(index); + } + + public static final int LABEL_FIELD_NUMBER = 5; + private volatile java.lang.Object label_; + /** + *
+   * This allows experimental kernels to be registered for an op that
+   * won't be used unless the user specifies a "_kernel" attr with
+   * value matching this.
+   * 
+ * + * optional string label = 5; + */ + public java.lang.String getLabel() { + java.lang.Object ref = label_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + label_ = s; + return s; + } + } + /** + *
+   * This allows experimental kernels to be registered for an op that
+   * won't be used unless the user specifies a "_kernel" attr with
+   * value matching this.
+   * 
+ * + * optional string label = 5; + */ + public com.google.protobuf.ByteString + getLabelBytes() { + java.lang.Object ref = label_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + label_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getOpBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, op_); + } + if (!getDeviceTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, deviceType_); + } + for (int i = 0; i < constraint_.size(); i++) { + output.writeMessage(3, constraint_.get(i)); + } + for (int i = 0; i < hostMemoryArg_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, hostMemoryArg_.getRaw(i)); + } + if (!getLabelBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, label_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getOpBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, op_); + } + if (!getDeviceTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, deviceType_); + } + for (int i = 0; i < constraint_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, constraint_.get(i)); + } + { + int dataSize = 0; + for (int i = 0; i < hostMemoryArg_.size(); i++) { + dataSize += computeStringSizeNoTag(hostMemoryArg_.getRaw(i)); + } + size += dataSize; + 
size += 1 * getHostMemoryArgList().size(); + } + if (!getLabelBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, label_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.KernelDef)) { + return super.equals(obj); + } + org.tensorflow.framework.KernelDef other = (org.tensorflow.framework.KernelDef) obj; + + boolean result = true; + result = result && getOp() + .equals(other.getOp()); + result = result && getDeviceType() + .equals(other.getDeviceType()); + result = result && getConstraintList() + .equals(other.getConstraintList()); + result = result && getHostMemoryArgList() + .equals(other.getHostMemoryArgList()); + result = result && getLabel() + .equals(other.getLabel()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + OP_FIELD_NUMBER; + hash = (53 * hash) + getOp().hashCode(); + hash = (37 * hash) + DEVICE_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getDeviceType().hashCode(); + if (getConstraintCount() > 0) { + hash = (37 * hash) + CONSTRAINT_FIELD_NUMBER; + hash = (53 * hash) + getConstraintList().hashCode(); + } + if (getHostMemoryArgCount() > 0) { + hash = (37 * hash) + HOST_MEMORY_ARG_FIELD_NUMBER; + hash = (53 * hash) + getHostMemoryArgList().hashCode(); + } + hash = (37 * hash) + LABEL_FIELD_NUMBER; + hash = (53 * hash) + getLabel().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.KernelDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + public static org.tensorflow.framework.KernelDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.KernelDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.KernelDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.KernelDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.KernelDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.KernelDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.KernelDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.KernelDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.KernelDef) + org.tensorflow.framework.KernelDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.KernelDef.class, org.tensorflow.framework.KernelDef.Builder.class); + } + + // Construct using org.tensorflow.framework.KernelDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + 
super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getConstraintFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + op_ = ""; + + deviceType_ = ""; + + if (constraintBuilder_ == null) { + constraint_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + constraintBuilder_.clear(); + } + hostMemoryArg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + label_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.KernelDefProtos.internal_static_tensorflow_KernelDef_descriptor; + } + + public org.tensorflow.framework.KernelDef getDefaultInstanceForType() { + return org.tensorflow.framework.KernelDef.getDefaultInstance(); + } + + public org.tensorflow.framework.KernelDef build() { + org.tensorflow.framework.KernelDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.KernelDef buildPartial() { + org.tensorflow.framework.KernelDef result = new org.tensorflow.framework.KernelDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.op_ = op_; + result.deviceType_ = deviceType_; + if (constraintBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + constraint_ = java.util.Collections.unmodifiableList(constraint_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.constraint_ = constraint_; + } else { + result.constraint_ = constraintBuilder_.build(); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + hostMemoryArg_ = hostMemoryArg_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.hostMemoryArg_ = hostMemoryArg_; + result.label_ = label_; + result.bitField0_ = 
to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.KernelDef) { + return mergeFrom((org.tensorflow.framework.KernelDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.KernelDef other) { + if (other == org.tensorflow.framework.KernelDef.getDefaultInstance()) return this; + if (!other.getOp().isEmpty()) { + op_ = other.op_; + onChanged(); + } + if (!other.getDeviceType().isEmpty()) { + deviceType_ = other.deviceType_; + onChanged(); + } + if (constraintBuilder_ == null) { + if (!other.constraint_.isEmpty()) { + if (constraint_.isEmpty()) { + constraint_ = other.constraint_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureConstraintIsMutable(); + constraint_.addAll(other.constraint_); + } + onChanged(); + } + } else { + if (!other.constraint_.isEmpty()) { + if (constraintBuilder_.isEmpty()) { + constraintBuilder_.dispose(); + constraintBuilder_ = null; + constraint_ = other.constraint_; + bitField0_ = (bitField0_ & ~0x00000004); + 
constraintBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getConstraintFieldBuilder() : null; + } else { + constraintBuilder_.addAllMessages(other.constraint_); + } + } + } + if (!other.hostMemoryArg_.isEmpty()) { + if (hostMemoryArg_.isEmpty()) { + hostMemoryArg_ = other.hostMemoryArg_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureHostMemoryArgIsMutable(); + hostMemoryArg_.addAll(other.hostMemoryArg_); + } + onChanged(); + } + if (!other.getLabel().isEmpty()) { + label_ = other.label_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.KernelDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.KernelDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object op_ = ""; + /** + *
+     * Must match the name of an Op.
+     * 
+ * + * optional string op = 1; + */ + public java.lang.String getOp() { + java.lang.Object ref = op_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + op_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Must match the name of an Op.
+     * 
+ * + * optional string op = 1; + */ + public com.google.protobuf.ByteString + getOpBytes() { + java.lang.Object ref = op_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + op_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Must match the name of an Op.
+     * 
+ * + * optional string op = 1; + */ + public Builder setOp( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + op_ = value; + onChanged(); + return this; + } + /** + *
+     * Must match the name of an Op.
+     * 
+ * + * optional string op = 1; + */ + public Builder clearOp() { + + op_ = getDefaultInstance().getOp(); + onChanged(); + return this; + } + /** + *
+     * Must match the name of an Op.
+     * 
+ * + * optional string op = 1; + */ + public Builder setOpBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + op_ = value; + onChanged(); + return this; + } + + private java.lang.Object deviceType_ = ""; + /** + *
+     * Type of device this kernel runs on.
+     * 
+ * + * optional string device_type = 2; + */ + public java.lang.String getDeviceType() { + java.lang.Object ref = deviceType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + deviceType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Type of device this kernel runs on.
+     * 
+ * + * optional string device_type = 2; + */ + public com.google.protobuf.ByteString + getDeviceTypeBytes() { + java.lang.Object ref = deviceType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + deviceType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Type of device this kernel runs on.
+     * 
+ * + * optional string device_type = 2; + */ + public Builder setDeviceType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + deviceType_ = value; + onChanged(); + return this; + } + /** + *
+     * Type of device this kernel runs on.
+     * 
+ * + * optional string device_type = 2; + */ + public Builder clearDeviceType() { + + deviceType_ = getDefaultInstance().getDeviceType(); + onChanged(); + return this; + } + /** + *
+     * Type of device this kernel runs on.
+     * 
+ * + * optional string device_type = 2; + */ + public Builder setDeviceTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + deviceType_ = value; + onChanged(); + return this; + } + + private java.util.List constraint_ = + java.util.Collections.emptyList(); + private void ensureConstraintIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + constraint_ = new java.util.ArrayList(constraint_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.KernelDef.AttrConstraint, org.tensorflow.framework.KernelDef.AttrConstraint.Builder, org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder> constraintBuilder_; + + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public java.util.List getConstraintList() { + if (constraintBuilder_ == null) { + return java.util.Collections.unmodifiableList(constraint_); + } else { + return constraintBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public int getConstraintCount() { + if (constraintBuilder_ == null) { + return constraint_.size(); + } else { + return constraintBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraint getConstraint(int index) { + if (constraintBuilder_ == null) { + return constraint_.get(index); + } else { + return constraintBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder setConstraint( + int index, org.tensorflow.framework.KernelDef.AttrConstraint value) { + if (constraintBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureConstraintIsMutable(); + constraint_.set(index, value); + onChanged(); + } else { + 
constraintBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder setConstraint( + int index, org.tensorflow.framework.KernelDef.AttrConstraint.Builder builderForValue) { + if (constraintBuilder_ == null) { + ensureConstraintIsMutable(); + constraint_.set(index, builderForValue.build()); + onChanged(); + } else { + constraintBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder addConstraint(org.tensorflow.framework.KernelDef.AttrConstraint value) { + if (constraintBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureConstraintIsMutable(); + constraint_.add(value); + onChanged(); + } else { + constraintBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder addConstraint( + int index, org.tensorflow.framework.KernelDef.AttrConstraint value) { + if (constraintBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureConstraintIsMutable(); + constraint_.add(index, value); + onChanged(); + } else { + constraintBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder addConstraint( + org.tensorflow.framework.KernelDef.AttrConstraint.Builder builderForValue) { + if (constraintBuilder_ == null) { + ensureConstraintIsMutable(); + constraint_.add(builderForValue.build()); + onChanged(); + } else { + constraintBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder addConstraint( + int index, org.tensorflow.framework.KernelDef.AttrConstraint.Builder builderForValue) { + if (constraintBuilder_ == null) { + 
ensureConstraintIsMutable(); + constraint_.add(index, builderForValue.build()); + onChanged(); + } else { + constraintBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder addAllConstraint( + java.lang.Iterable values) { + if (constraintBuilder_ == null) { + ensureConstraintIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, constraint_); + onChanged(); + } else { + constraintBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder clearConstraint() { + if (constraintBuilder_ == null) { + constraint_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + constraintBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public Builder removeConstraint(int index) { + if (constraintBuilder_ == null) { + ensureConstraintIsMutable(); + constraint_.remove(index); + onChanged(); + } else { + constraintBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraint.Builder getConstraintBuilder( + int index) { + return getConstraintFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder getConstraintOrBuilder( + int index) { + if (constraintBuilder_ == null) { + return constraint_.get(index); } else { + return constraintBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public java.util.List + getConstraintOrBuilderList() { + if (constraintBuilder_ != null) { + return constraintBuilder_.getMessageOrBuilderList(); 
+ } else { + return java.util.Collections.unmodifiableList(constraint_); + } + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraint.Builder addConstraintBuilder() { + return getConstraintFieldBuilder().addBuilder( + org.tensorflow.framework.KernelDef.AttrConstraint.getDefaultInstance()); + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public org.tensorflow.framework.KernelDef.AttrConstraint.Builder addConstraintBuilder( + int index) { + return getConstraintFieldBuilder().addBuilder( + index, org.tensorflow.framework.KernelDef.AttrConstraint.getDefaultInstance()); + } + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + public java.util.List + getConstraintBuilderList() { + return getConstraintFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.KernelDef.AttrConstraint, org.tensorflow.framework.KernelDef.AttrConstraint.Builder, org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder> + getConstraintFieldBuilder() { + if (constraintBuilder_ == null) { + constraintBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.KernelDef.AttrConstraint, org.tensorflow.framework.KernelDef.AttrConstraint.Builder, org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder>( + constraint_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + constraint_ = null; + } + return constraintBuilder_; + } + + private com.google.protobuf.LazyStringList hostMemoryArg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureHostMemoryArgIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + hostMemoryArg_ = new com.google.protobuf.LazyStringArrayList(hostMemoryArg_); + bitField0_ |= 0x00000008; + } + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public com.google.protobuf.ProtocolStringList + getHostMemoryArgList() { + return hostMemoryArg_.getUnmodifiableView(); + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public int getHostMemoryArgCount() { + return hostMemoryArg_.size(); + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public java.lang.String getHostMemoryArg(int index) { + return hostMemoryArg_.get(index); + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public com.google.protobuf.ByteString + getHostMemoryArgBytes(int index) { + return hostMemoryArg_.getByteString(index); + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public Builder setHostMemoryArg( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureHostMemoryArgIsMutable(); + hostMemoryArg_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public Builder addHostMemoryArg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureHostMemoryArgIsMutable(); + hostMemoryArg_.add(value); + onChanged(); + return this; + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public Builder addAllHostMemoryArg( + java.lang.Iterable values) { + ensureHostMemoryArgIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, hostMemoryArg_); + onChanged(); + return this; + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public Builder clearHostMemoryArg() { + hostMemoryArg_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+     * Names of the Op's input_/output_args that reside in host memory
+     * instead of device memory.
+     * 
+ * + * repeated string host_memory_arg = 4; + */ + public Builder addHostMemoryArgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureHostMemoryArgIsMutable(); + hostMemoryArg_.add(value); + onChanged(); + return this; + } + + private java.lang.Object label_ = ""; + /** + *
+     * This allows experimental kernels to be registered for an op that
+     * won't be used unless the user specifies a "_kernel" attr with
+     * value matching this.
+     * 
+ * + * optional string label = 5; + */ + public java.lang.String getLabel() { + java.lang.Object ref = label_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + label_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * This allows experimental kernels to be registered for an op that
+     * won't be used unless the user specifies a "_kernel" attr with
+     * value matching this.
+     * 
+ * + * optional string label = 5; + */ + public com.google.protobuf.ByteString + getLabelBytes() { + java.lang.Object ref = label_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + label_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * This allows experimental kernels to be registered for an op that
+     * won't be used unless the user specifies a "_kernel" attr with
+     * value matching this.
+     * 
+ * + * optional string label = 5; + */ + public Builder setLabel( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + label_ = value; + onChanged(); + return this; + } + /** + *
+     * This allows experimental kernels to be registered for an op that
+     * won't be used unless the user specifies a "_kernel" attr with
+     * value matching this.
+     * 
+ * + * optional string label = 5; + */ + public Builder clearLabel() { + + label_ = getDefaultInstance().getLabel(); + onChanged(); + return this; + } + /** + *
+     * This allows experimental kernels to be registered for an op that
+     * won't be used unless the user specifies a "_kernel" attr with
+     * value matching this.
+     * 
+ * + * optional string label = 5; + */ + public Builder setLabelBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + label_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.KernelDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.KernelDef) + private static final org.tensorflow.framework.KernelDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.KernelDef(); + } + + public static org.tensorflow.framework.KernelDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public KernelDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new KernelDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.KernelDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefOrBuilder.java new file mode 100644 index 0000000000000..3eb3e30d9ec40 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefOrBuilder.java @@ 
-0,0 +1,130 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/kernel_def.proto + +package org.tensorflow.framework; + +public interface KernelDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.KernelDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Must match the name of an Op.
+   * 
+ * + * optional string op = 1; + */ + java.lang.String getOp(); + /** + *
+   * Must match the name of an Op.
+   * 
+ * + * optional string op = 1; + */ + com.google.protobuf.ByteString + getOpBytes(); + + /** + *
+   * Type of device this kernel runs on.
+   * 
+ * + * optional string device_type = 2; + */ + java.lang.String getDeviceType(); + /** + *
+   * Type of device this kernel runs on.
+   * 
+ * + * optional string device_type = 2; + */ + com.google.protobuf.ByteString + getDeviceTypeBytes(); + + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + java.util.List + getConstraintList(); + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + org.tensorflow.framework.KernelDef.AttrConstraint getConstraint(int index); + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + int getConstraintCount(); + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + java.util.List + getConstraintOrBuilderList(); + /** + * repeated .tensorflow.KernelDef.AttrConstraint constraint = 3; + */ + org.tensorflow.framework.KernelDef.AttrConstraintOrBuilder getConstraintOrBuilder( + int index); + + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + java.util.List + getHostMemoryArgList(); + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + int getHostMemoryArgCount(); + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + java.lang.String getHostMemoryArg(int index); + /** + *
+   * Names of the Op's input_/output_args that reside in host memory
+   * instead of device memory.
+   * 
+ * + * repeated string host_memory_arg = 4; + */ + com.google.protobuf.ByteString + getHostMemoryArgBytes(int index); + + /** + *
+   * This allows experimental kernels to be registered for an op that
+   * won't be used unless the user specifies a "_kernel" attr with
+   * value matching this.
+   * 
+ * + * optional string label = 5; + */ + java.lang.String getLabel(); + /** + *
+   * This allows experimental kernels to be registered for an op that
+   * won't be used unless the user specifies a "_kernel" attr with
+   * value matching this.
+   * 
+ * + * optional string label = 5; + */ + com.google.protobuf.ByteString + getLabelBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefProtos.java new file mode 100644 index 0000000000000..ea3042a34ccba --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/KernelDefProtos.java @@ -0,0 +1,76 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/kernel_def.proto + +package org.tensorflow.framework; + +public final class KernelDefProtos { + private KernelDefProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_KernelDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_KernelDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_KernelDef_AttrConstraint_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_KernelDef_AttrConstraint_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n*tensorflow/core/framework/kernel_def.p" + + "roto\022\ntensorflow\032*tensorflow/core/framew" + + "ork/attr_value.proto\"\335\001\n\tKernelDef\022\n\n\002op" + + "\030\001 
\001(\t\022\023\n\013device_type\030\002 \001(\t\0228\n\nconstrain" + + "t\030\003 \003(\0132$.tensorflow.KernelDef.AttrConst" + + "raint\022\027\n\017host_memory_arg\030\004 \003(\t\022\r\n\005label\030" + + "\005 \001(\t\032M\n\016AttrConstraint\022\014\n\004name\030\001 \001(\t\022-\n" + + "\016allowed_values\030\002 \001(\0132\025.tensorflow.AttrV" + + "alueB0\n\030org.tensorflow.frameworkB\017Kernel" + + "DefProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.AttrValueProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_KernelDef_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_KernelDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_KernelDef_descriptor, + new java.lang.String[] { "Op", "DeviceType", "Constraint", "HostMemoryArg", "Label", }); + internal_static_tensorflow_KernelDef_AttrConstraint_descriptor = + internal_static_tensorflow_KernelDef_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_KernelDef_AttrConstraint_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_KernelDef_AttrConstraint_descriptor, + new java.lang.String[] { "Name", "AllowedValues", }); + org.tensorflow.framework.AttrValueProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git 
a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/LogMemoryProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/LogMemoryProtos.java new file mode 100644 index 0000000000000..7866d7be4bfc3 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/LogMemoryProtos.java @@ -0,0 +1,131 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public final class LogMemoryProtos { + private LogMemoryProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_MemoryLogStep_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_MemoryLogStep_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_MemoryLogTensorAllocation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_MemoryLogTensorAllocation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_MemoryLogTensorDeallocation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_MemoryLogTensorDeallocation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_MemoryLogTensorOutput_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_MemoryLogTensorOutput_fieldAccessorTable; 
+ static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_MemoryLogRawAllocation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_MemoryLogRawAllocation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_MemoryLogRawDeallocation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_MemoryLogRawDeallocation_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n*tensorflow/core/framework/log_memory.p" + + "roto\022\ntensorflow\0322tensorflow/core/framew" + + "ork/tensor_description.proto\"0\n\rMemoryLo" + + "gStep\022\017\n\007step_id\030\001 \001(\003\022\016\n\006handle\030\002 \001(\t\"p" + + "\n\031MemoryLogTensorAllocation\022\017\n\007step_id\030\001" + + " \001(\003\022\023\n\013kernel_name\030\002 \001(\t\022-\n\006tensor\030\003 \001(" + + "\0132\035.tensorflow.TensorDescription\"L\n\033Memo" + + "ryLogTensorDeallocation\022\025\n\rallocation_id" + + "\030\001 \001(\003\022\026\n\016allocator_name\030\002 \001(\t\"{\n\025Memory" + + "LogTensorOutput\022\017\n\007step_id\030\001 \001(\003\022\023\n\013kern", + "el_name\030\002 \001(\t\022\r\n\005index\030\003 \001(\005\022-\n\006tensor\030\004" + + " \001(\0132\035.tensorflow.TensorDescription\"\213\001\n\026" + + "MemoryLogRawAllocation\022\017\n\007step_id\030\001 \001(\003\022" + + "\021\n\toperation\030\002 \001(\t\022\021\n\tnum_bytes\030\003 \001(\003\022\013\n" + + "\003ptr\030\004 \001(\004\022\025\n\rallocation_id\030\005 \001(\003\022\026\n\016all" + + "ocator_name\030\006 \001(\t\"\177\n\030MemoryLogRawDealloc" + + "ation\022\017\n\007step_id\030\001 
\001(\003\022\021\n\toperation\030\002 \001(" + + "\t\022\025\n\rallocation_id\030\003 \001(\003\022\026\n\016allocator_na" + + "me\030\004 \001(\t\022\020\n\010deferred\030\005 \001(\010B0\n\030org.tensor" + + "flow.frameworkB\017LogMemoryProtosP\001\370\001\001b\006pr", + "oto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.TensorDescriptionProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_MemoryLogStep_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_MemoryLogStep_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_MemoryLogStep_descriptor, + new java.lang.String[] { "StepId", "Handle", }); + internal_static_tensorflow_MemoryLogTensorAllocation_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_MemoryLogTensorAllocation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_MemoryLogTensorAllocation_descriptor, + new java.lang.String[] { "StepId", "KernelName", "Tensor", }); + internal_static_tensorflow_MemoryLogTensorDeallocation_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_tensorflow_MemoryLogTensorDeallocation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_MemoryLogTensorDeallocation_descriptor, + new java.lang.String[] { "AllocationId", "AllocatorName", }); + 
internal_static_tensorflow_MemoryLogTensorOutput_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_tensorflow_MemoryLogTensorOutput_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_MemoryLogTensorOutput_descriptor, + new java.lang.String[] { "StepId", "KernelName", "Index", "Tensor", }); + internal_static_tensorflow_MemoryLogRawAllocation_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_tensorflow_MemoryLogRawAllocation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_MemoryLogRawAllocation_descriptor, + new java.lang.String[] { "StepId", "Operation", "NumBytes", "Ptr", "AllocationId", "AllocatorName", }); + internal_static_tensorflow_MemoryLogRawDeallocation_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_tensorflow_MemoryLogRawDeallocation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_MemoryLogRawDeallocation_descriptor, + new java.lang.String[] { "StepId", "Operation", "AllocationId", "AllocatorName", "Deferred", }); + org.tensorflow.framework.TensorDescriptionProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocation.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocation.java new file mode 100644 index 0000000000000..ff719815669fe --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocation.java @@ -0,0 +1,981 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.MemoryLogRawAllocation} + */ +public final class MemoryLogRawAllocation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.MemoryLogRawAllocation) + MemoryLogRawAllocationOrBuilder { + // Use MemoryLogRawAllocation.newBuilder() to construct. + private MemoryLogRawAllocation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private MemoryLogRawAllocation() { + stepId_ = 0L; + operation_ = ""; + numBytes_ = 0L; + ptr_ = 0L; + allocationId_ = 0L; + allocatorName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private MemoryLogRawAllocation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + stepId_ = input.readInt64(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + operation_ = s; + break; + } + case 24: { + + numBytes_ = input.readInt64(); + break; + } + case 32: { + + ptr_ = input.readUInt64(); + break; + } + case 40: { + + allocationId_ = input.readInt64(); + break; + } + case 50: { + java.lang.String s = input.readStringRequireUtf8(); + + allocatorName_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + 
e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawAllocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawAllocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogRawAllocation.class, org.tensorflow.framework.MemoryLogRawAllocation.Builder.class); + } + + public static final int STEP_ID_FIELD_NUMBER = 1; + private long stepId_; + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + + public static final int OPERATION_FIELD_NUMBER = 2; + private volatile java.lang.Object operation_; + /** + *
+   * Name of the operation making the allocation.
+   * 
+ * + * optional string operation = 2; + */ + public java.lang.String getOperation() { + java.lang.Object ref = operation_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operation_ = s; + return s; + } + } + /** + *
+   * Name of the operation making the allocation.
+   * 
+ * + * optional string operation = 2; + */ + public com.google.protobuf.ByteString + getOperationBytes() { + java.lang.Object ref = operation_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NUM_BYTES_FIELD_NUMBER = 3; + private long numBytes_; + /** + *
+   * Number of bytes in the allocation.
+   * 
+ * + * optional int64 num_bytes = 3; + */ + public long getNumBytes() { + return numBytes_; + } + + public static final int PTR_FIELD_NUMBER = 4; + private long ptr_; + /** + *
+   * Address of the allocation.
+   * 
+ * + * optional uint64 ptr = 4; + */ + public long getPtr() { + return ptr_; + } + + public static final int ALLOCATION_ID_FIELD_NUMBER = 5; + private long allocationId_; + /** + *
+   * Id of the tensor buffer being allocated, used to match to a
+   * corresponding deallocation.
+   * 
+ * + * optional int64 allocation_id = 5; + */ + public long getAllocationId() { + return allocationId_; + } + + public static final int ALLOCATOR_NAME_FIELD_NUMBER = 6; + private volatile java.lang.Object allocatorName_; + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 6; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } + } + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 6; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stepId_ != 0L) { + output.writeInt64(1, stepId_); + } + if (!getOperationBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, operation_); + } + if (numBytes_ != 0L) { + output.writeInt64(3, numBytes_); + } + if (ptr_ != 0L) { + output.writeUInt64(4, ptr_); + } + if (allocationId_ != 0L) { + output.writeInt64(5, allocationId_); + } + if (!getAllocatorNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, allocatorName_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stepId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, stepId_); + } + if (!getOperationBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, operation_); + } + if (numBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, numBytes_); + } + if (ptr_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, ptr_); + } + if (allocationId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(5, allocationId_); + } + if (!getAllocatorNameBytes().isEmpty()) { 
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, allocatorName_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.MemoryLogRawAllocation)) { + return super.equals(obj); + } + org.tensorflow.framework.MemoryLogRawAllocation other = (org.tensorflow.framework.MemoryLogRawAllocation) obj; + + boolean result = true; + result = result && (getStepId() + == other.getStepId()); + result = result && getOperation() + .equals(other.getOperation()); + result = result && (getNumBytes() + == other.getNumBytes()); + result = result && (getPtr() + == other.getPtr()); + result = result && (getAllocationId() + == other.getAllocationId()); + result = result && getAllocatorName() + .equals(other.getAllocatorName()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStepId()); + hash = (37 * hash) + OPERATION_FIELD_NUMBER; + hash = (53 * hash) + getOperation().hashCode(); + hash = (37 * hash) + NUM_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNumBytes()); + hash = (37 * hash) + PTR_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getPtr()); + hash = (37 * hash) + ALLOCATION_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllocationId()); + hash = (37 * hash) + ALLOCATOR_NAME_FIELD_NUMBER; + hash = (53 * hash) + getAllocatorName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.tensorflow.framework.MemoryLogRawAllocation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogRawAllocation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.MemoryLogRawAllocation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.MemoryLogRawAllocation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.MemoryLogRawAllocation) + org.tensorflow.framework.MemoryLogRawAllocationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawAllocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawAllocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogRawAllocation.class, org.tensorflow.framework.MemoryLogRawAllocation.Builder.class); + } + + // Construct using org.tensorflow.framework.MemoryLogRawAllocation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + stepId_ = 0L; + + operation_ = ""; + + numBytes_ = 0L; + + ptr_ = 0L; + + allocationId_ = 0L; + + allocatorName_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawAllocation_descriptor; + } + + public 
org.tensorflow.framework.MemoryLogRawAllocation getDefaultInstanceForType() { + return org.tensorflow.framework.MemoryLogRawAllocation.getDefaultInstance(); + } + + public org.tensorflow.framework.MemoryLogRawAllocation build() { + org.tensorflow.framework.MemoryLogRawAllocation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.MemoryLogRawAllocation buildPartial() { + org.tensorflow.framework.MemoryLogRawAllocation result = new org.tensorflow.framework.MemoryLogRawAllocation(this); + result.stepId_ = stepId_; + result.operation_ = operation_; + result.numBytes_ = numBytes_; + result.ptr_ = ptr_; + result.allocationId_ = allocationId_; + result.allocatorName_ = allocatorName_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.MemoryLogRawAllocation) { + return mergeFrom((org.tensorflow.framework.MemoryLogRawAllocation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.tensorflow.framework.MemoryLogRawAllocation other) { + if (other == org.tensorflow.framework.MemoryLogRawAllocation.getDefaultInstance()) return this; + if (other.getStepId() != 0L) { + setStepId(other.getStepId()); + } + if (!other.getOperation().isEmpty()) { + operation_ = other.operation_; + onChanged(); + } + if (other.getNumBytes() != 0L) { + setNumBytes(other.getNumBytes()); + } + if (other.getPtr() != 0L) { + setPtr(other.getPtr()); + } + if (other.getAllocationId() != 0L) { + setAllocationId(other.getAllocationId()); + } + if (!other.getAllocatorName().isEmpty()) { + allocatorName_ = other.allocatorName_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.MemoryLogRawAllocation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.MemoryLogRawAllocation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long stepId_ ; + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder setStepId(long value) { + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object operation_ = ""; + /** + *
+     * Name of the operation making the allocation.
+     * 
+ * + * optional string operation = 2; + */ + public java.lang.String getOperation() { + java.lang.Object ref = operation_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operation_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the operation making the allocation.
+     * 
+ * + * optional string operation = 2; + */ + public com.google.protobuf.ByteString + getOperationBytes() { + java.lang.Object ref = operation_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the operation making the allocation.
+     * 
+ * + * optional string operation = 2; + */ + public Builder setOperation( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + operation_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the operation making the allocation.
+     * 
+ * + * optional string operation = 2; + */ + public Builder clearOperation() { + + operation_ = getDefaultInstance().getOperation(); + onChanged(); + return this; + } + /** + *
+     * Name of the operation making the allocation.
+     * 
+ * + * optional string operation = 2; + */ + public Builder setOperationBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + operation_ = value; + onChanged(); + return this; + } + + private long numBytes_ ; + /** + *
+     * Number of bytes in the allocation.
+     * 
+ * + * optional int64 num_bytes = 3; + */ + public long getNumBytes() { + return numBytes_; + } + /** + *
+     * Number of bytes in the allocation.
+     * 
+ * + * optional int64 num_bytes = 3; + */ + public Builder setNumBytes(long value) { + + numBytes_ = value; + onChanged(); + return this; + } + /** + *
+     * Number of bytes in the allocation.
+     * 
+ * + * optional int64 num_bytes = 3; + */ + public Builder clearNumBytes() { + + numBytes_ = 0L; + onChanged(); + return this; + } + + private long ptr_ ; + /** + *
+     * Address of the allocation.
+     * 
+ * + * optional uint64 ptr = 4; + */ + public long getPtr() { + return ptr_; + } + /** + *
+     * Address of the allocation.
+     * 
+ * + * optional uint64 ptr = 4; + */ + public Builder setPtr(long value) { + + ptr_ = value; + onChanged(); + return this; + } + /** + *
+     * Address of the allocation.
+     * 
+ * + * optional uint64 ptr = 4; + */ + public Builder clearPtr() { + + ptr_ = 0L; + onChanged(); + return this; + } + + private long allocationId_ ; + /** + *
+     * Id of the tensor buffer being allocated, used to match to a
+     * corresponding deallocation.
+     * 
+ * + * optional int64 allocation_id = 5; + */ + public long getAllocationId() { + return allocationId_; + } + /** + *
+     * Id of the tensor buffer being allocated, used to match to a
+     * corresponding deallocation.
+     * 
+ * + * optional int64 allocation_id = 5; + */ + public Builder setAllocationId(long value) { + + allocationId_ = value; + onChanged(); + return this; + } + /** + *
+     * Id of the tensor buffer being allocated, used to match to a
+     * corresponding deallocation.
+     * 
+ * + * optional int64 allocation_id = 5; + */ + public Builder clearAllocationId() { + + allocationId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object allocatorName_ = ""; + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 6; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 6; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 6; + */ + public Builder setAllocatorName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + allocatorName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 6; + */ + public Builder clearAllocatorName() { + + allocatorName_ = getDefaultInstance().getAllocatorName(); + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 6; + */ + public Builder setAllocatorNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + allocatorName_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.MemoryLogRawAllocation) + } + + // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogRawAllocation) + private static final org.tensorflow.framework.MemoryLogRawAllocation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.MemoryLogRawAllocation(); + } + + public static org.tensorflow.framework.MemoryLogRawAllocation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MemoryLogRawAllocation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MemoryLogRawAllocation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.MemoryLogRawAllocation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocationOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocationOrBuilder.java new file mode 100644 index 
0000000000000..c279317d55704 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawAllocationOrBuilder.java @@ -0,0 +1,82 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public interface MemoryLogRawAllocationOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.MemoryLogRawAllocation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + long getStepId(); + + /** + *
+   * Name of the operation making the allocation.
+   * 
+ * + * optional string operation = 2; + */ + java.lang.String getOperation(); + /** + *
+   * Name of the operation making the allocation.
+   * 
+ * + * optional string operation = 2; + */ + com.google.protobuf.ByteString + getOperationBytes(); + + /** + *
+   * Number of bytes in the allocation.
+   * 
+ * + * optional int64 num_bytes = 3; + */ + long getNumBytes(); + + /** + *
+   * Address of the allocation.
+   * 
+ * + * optional uint64 ptr = 4; + */ + long getPtr(); + + /** + *
+   * Id of the tensor buffer being allocated, used to match to a
+   * corresponding deallocation.
+   * 
+ * + * optional int64 allocation_id = 5; + */ + long getAllocationId(); + + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 6; + */ + java.lang.String getAllocatorName(); + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 6; + */ + com.google.protobuf.ByteString + getAllocatorNameBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocation.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocation.java new file mode 100644 index 0000000000000..f6242692e3129 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocation.java @@ -0,0 +1,910 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.MemoryLogRawDeallocation} + */ +public final class MemoryLogRawDeallocation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.MemoryLogRawDeallocation) + MemoryLogRawDeallocationOrBuilder { + // Use MemoryLogRawDeallocation.newBuilder() to construct. 
+ private MemoryLogRawDeallocation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private MemoryLogRawDeallocation() { + stepId_ = 0L; + operation_ = ""; + allocationId_ = 0L; + allocatorName_ = ""; + deferred_ = false; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private MemoryLogRawDeallocation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + stepId_ = input.readInt64(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + operation_ = s; + break; + } + case 24: { + + allocationId_ = input.readInt64(); + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + allocatorName_ = s; + break; + } + case 40: { + + deferred_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawDeallocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawDeallocation_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogRawDeallocation.class, org.tensorflow.framework.MemoryLogRawDeallocation.Builder.class); + } + + public static final int STEP_ID_FIELD_NUMBER = 1; + private long stepId_; + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + + public static final int OPERATION_FIELD_NUMBER = 2; + private volatile java.lang.Object operation_; + /** + *
+   * Name of the operation making the deallocation.
+   * 
+ * + * optional string operation = 2; + */ + public java.lang.String getOperation() { + java.lang.Object ref = operation_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operation_ = s; + return s; + } + } + /** + *
+   * Name of the operation making the deallocation.
+   * 
+ * + * optional string operation = 2; + */ + public com.google.protobuf.ByteString + getOperationBytes() { + java.lang.Object ref = operation_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ALLOCATION_ID_FIELD_NUMBER = 3; + private long allocationId_; + /** + *
+   * Id of the tensor buffer being deallocated, used to match to a
+   * corresponding allocation.
+   * 
+ * + * optional int64 allocation_id = 3; + */ + public long getAllocationId() { + return allocationId_; + } + + public static final int ALLOCATOR_NAME_FIELD_NUMBER = 4; + private volatile java.lang.Object allocatorName_; + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 4; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } + } + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 4; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DEFERRED_FIELD_NUMBER = 5; + private boolean deferred_; + /** + *
+   * True if the deallocation is queued and will be performed later,
+   * e.g. for GPU lazy freeing of buffers.
+   * 
+ * + * optional bool deferred = 5; + */ + public boolean getDeferred() { + return deferred_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stepId_ != 0L) { + output.writeInt64(1, stepId_); + } + if (!getOperationBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, operation_); + } + if (allocationId_ != 0L) { + output.writeInt64(3, allocationId_); + } + if (!getAllocatorNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, allocatorName_); + } + if (deferred_ != false) { + output.writeBool(5, deferred_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stepId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, stepId_); + } + if (!getOperationBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, operation_); + } + if (allocationId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, allocationId_); + } + if (!getAllocatorNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, allocatorName_); + } + if (deferred_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, deferred_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.MemoryLogRawDeallocation)) { + return super.equals(obj); + } + org.tensorflow.framework.MemoryLogRawDeallocation 
other = (org.tensorflow.framework.MemoryLogRawDeallocation) obj; + + boolean result = true; + result = result && (getStepId() + == other.getStepId()); + result = result && getOperation() + .equals(other.getOperation()); + result = result && (getAllocationId() + == other.getAllocationId()); + result = result && getAllocatorName() + .equals(other.getAllocatorName()); + result = result && (getDeferred() + == other.getDeferred()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStepId()); + hash = (37 * hash) + OPERATION_FIELD_NUMBER; + hash = (53 * hash) + getOperation().hashCode(); + hash = (37 * hash) + ALLOCATION_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllocationId()); + hash = (37 * hash) + ALLOCATOR_NAME_FIELD_NUMBER; + hash = (53 * hash) + getAllocatorName().hashCode(); + hash = (37 * hash) + DEFERRED_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDeferred()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogRawDeallocation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public 
Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.MemoryLogRawDeallocation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.MemoryLogRawDeallocation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.MemoryLogRawDeallocation) + org.tensorflow.framework.MemoryLogRawDeallocationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawDeallocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawDeallocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogRawDeallocation.class, org.tensorflow.framework.MemoryLogRawDeallocation.Builder.class); + } + + // Construct using org.tensorflow.framework.MemoryLogRawDeallocation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + 
super.clear(); + stepId_ = 0L; + + operation_ = ""; + + allocationId_ = 0L; + + allocatorName_ = ""; + + deferred_ = false; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogRawDeallocation_descriptor; + } + + public org.tensorflow.framework.MemoryLogRawDeallocation getDefaultInstanceForType() { + return org.tensorflow.framework.MemoryLogRawDeallocation.getDefaultInstance(); + } + + public org.tensorflow.framework.MemoryLogRawDeallocation build() { + org.tensorflow.framework.MemoryLogRawDeallocation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.MemoryLogRawDeallocation buildPartial() { + org.tensorflow.framework.MemoryLogRawDeallocation result = new org.tensorflow.framework.MemoryLogRawDeallocation(this); + result.stepId_ = stepId_; + result.operation_ = operation_; + result.allocationId_ = allocationId_; + result.allocatorName_ = allocatorName_; + result.deferred_ = deferred_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) 
super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.MemoryLogRawDeallocation) { + return mergeFrom((org.tensorflow.framework.MemoryLogRawDeallocation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.MemoryLogRawDeallocation other) { + if (other == org.tensorflow.framework.MemoryLogRawDeallocation.getDefaultInstance()) return this; + if (other.getStepId() != 0L) { + setStepId(other.getStepId()); + } + if (!other.getOperation().isEmpty()) { + operation_ = other.operation_; + onChanged(); + } + if (other.getAllocationId() != 0L) { + setAllocationId(other.getAllocationId()); + } + if (!other.getAllocatorName().isEmpty()) { + allocatorName_ = other.allocatorName_; + onChanged(); + } + if (other.getDeferred() != false) { + setDeferred(other.getDeferred()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.MemoryLogRawDeallocation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.MemoryLogRawDeallocation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long stepId_ ; + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder setStepId(long value) { + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object operation_ = ""; + /** + *
+     * Name of the operation making the deallocation.
+     * 
+ * + * optional string operation = 2; + */ + public java.lang.String getOperation() { + java.lang.Object ref = operation_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operation_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the operation making the deallocation.
+     * 
+ * + * optional string operation = 2; + */ + public com.google.protobuf.ByteString + getOperationBytes() { + java.lang.Object ref = operation_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the operation making the deallocation.
+     * 
+ * + * optional string operation = 2; + */ + public Builder setOperation( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + operation_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the operation making the deallocation.
+     * 
+ * + * optional string operation = 2; + */ + public Builder clearOperation() { + + operation_ = getDefaultInstance().getOperation(); + onChanged(); + return this; + } + /** + *
+     * Name of the operation making the deallocation.
+     * 
+ * + * optional string operation = 2; + */ + public Builder setOperationBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + operation_ = value; + onChanged(); + return this; + } + + private long allocationId_ ; + /** + *
+     * Id of the tensor buffer being deallocated, used to match to a
+     * corresponding allocation.
+     * 
+ * + * optional int64 allocation_id = 3; + */ + public long getAllocationId() { + return allocationId_; + } + /** + *
+     * Id of the tensor buffer being deallocated, used to match to a
+     * corresponding allocation.
+     * 
+ * + * optional int64 allocation_id = 3; + */ + public Builder setAllocationId(long value) { + + allocationId_ = value; + onChanged(); + return this; + } + /** + *
+     * Id of the tensor buffer being deallocated, used to match to a
+     * corresponding allocation.
+     * 
+ * + * optional int64 allocation_id = 3; + */ + public Builder clearAllocationId() { + + allocationId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object allocatorName_ = ""; + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 4; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 4; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 4; + */ + public Builder setAllocatorName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + allocatorName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 4; + */ + public Builder clearAllocatorName() { + + allocatorName_ = getDefaultInstance().getAllocatorName(); + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 4; + */ + public Builder setAllocatorNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + allocatorName_ = value; + onChanged(); + return this; + } + + private boolean deferred_ ; + /** + *
+     * True if the deallocation is queued and will be performed later,
+     * e.g. for GPU lazy freeing of buffers.
+     * 
+ * + * optional bool deferred = 5; + */ + public boolean getDeferred() { + return deferred_; + } + /** + *
+     * True if the deallocation is queued and will be performed later,
+     * e.g. for GPU lazy freeing of buffers.
+     * 
+ * + * optional bool deferred = 5; + */ + public Builder setDeferred(boolean value) { + + deferred_ = value; + onChanged(); + return this; + } + /** + *
+     * True if the deallocation is queued and will be performed later,
+     * e.g. for GPU lazy freeing of buffers.
+     * 
+ * + * optional bool deferred = 5; + */ + public Builder clearDeferred() { + + deferred_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.MemoryLogRawDeallocation) + } + + // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogRawDeallocation) + private static final org.tensorflow.framework.MemoryLogRawDeallocation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.MemoryLogRawDeallocation(); + } + + public static org.tensorflow.framework.MemoryLogRawDeallocation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MemoryLogRawDeallocation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MemoryLogRawDeallocation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.MemoryLogRawDeallocation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocationOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocationOrBuilder.java new file mode 100644 index 0000000000000..8eb2862cbc722 --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogRawDeallocationOrBuilder.java @@ -0,0 +1,74 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public interface MemoryLogRawDeallocationOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.MemoryLogRawDeallocation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + long getStepId(); + + /** + *
+   * Name of the operation making the deallocation.
+   * 
+ * + * optional string operation = 2; + */ + java.lang.String getOperation(); + /** + *
+   * Name of the operation making the deallocation.
+   * 
+ * + * optional string operation = 2; + */ + com.google.protobuf.ByteString + getOperationBytes(); + + /** + *
+   * Id of the tensor buffer being deallocated, used to match to a
+   * corresponding allocation.
+   * 
+ * + * optional int64 allocation_id = 3; + */ + long getAllocationId(); + + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 4; + */ + java.lang.String getAllocatorName(); + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 4; + */ + com.google.protobuf.ByteString + getAllocatorNameBytes(); + + /** + *
+   * True if the deallocation is queued and will be performed later,
+   * e.g. for GPU lazy freeing of buffers.
+   * 
+ * + * optional bool deferred = 5; + */ + boolean getDeferred(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStep.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStep.java new file mode 100644 index 0000000000000..ba247e2dd32d2 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStep.java @@ -0,0 +1,597 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.MemoryLogStep} + */ +public final class MemoryLogStep extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.MemoryLogStep) + MemoryLogStepOrBuilder { + // Use MemoryLogStep.newBuilder() to construct. + private MemoryLogStep(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private MemoryLogStep() { + stepId_ = 0L; + handle_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private MemoryLogStep( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + stepId_ = input.readInt64(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + handle_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + 
} catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogStep_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogStep_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogStep.class, org.tensorflow.framework.MemoryLogStep.Builder.class); + } + + public static final int STEP_ID_FIELD_NUMBER = 1; + private long stepId_; + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + + public static final int HANDLE_FIELD_NUMBER = 2; + private volatile java.lang.Object handle_; + /** + *
+   * Handle describing the feeds and fetches of the step.
+   * 
+ * + * optional string handle = 2; + */ + public java.lang.String getHandle() { + java.lang.Object ref = handle_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + handle_ = s; + return s; + } + } + /** + *
+   * Handle describing the feeds and fetches of the step.
+   * 
+ * + * optional string handle = 2; + */ + public com.google.protobuf.ByteString + getHandleBytes() { + java.lang.Object ref = handle_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + handle_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stepId_ != 0L) { + output.writeInt64(1, stepId_); + } + if (!getHandleBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, handle_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stepId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, stepId_); + } + if (!getHandleBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, handle_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.MemoryLogStep)) { + return super.equals(obj); + } + org.tensorflow.framework.MemoryLogStep other = (org.tensorflow.framework.MemoryLogStep) obj; + + boolean result = true; + result = result && (getStepId() + == other.getStepId()); + result = result && getHandle() + .equals(other.getHandle()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStepId()); + hash = (37 * hash) + HANDLE_FIELD_NUMBER; + hash = (53 * hash) + getHandle().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.MemoryLogStep parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogStep parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogStep parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogStep parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.MemoryLogStep prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.MemoryLogStep} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.MemoryLogStep) + org.tensorflow.framework.MemoryLogStepOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogStep_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogStep_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogStep.class, org.tensorflow.framework.MemoryLogStep.Builder.class); + } + + // Construct using org.tensorflow.framework.MemoryLogStep.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + stepId_ = 0L; + + handle_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogStep_descriptor; + } + + public org.tensorflow.framework.MemoryLogStep getDefaultInstanceForType() { + return org.tensorflow.framework.MemoryLogStep.getDefaultInstance(); + } + + public org.tensorflow.framework.MemoryLogStep 
build() { + org.tensorflow.framework.MemoryLogStep result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.MemoryLogStep buildPartial() { + org.tensorflow.framework.MemoryLogStep result = new org.tensorflow.framework.MemoryLogStep(this); + result.stepId_ = stepId_; + result.handle_ = handle_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.MemoryLogStep) { + return mergeFrom((org.tensorflow.framework.MemoryLogStep)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.MemoryLogStep other) { + if (other == org.tensorflow.framework.MemoryLogStep.getDefaultInstance()) return this; + if (other.getStepId() != 0L) { + setStepId(other.getStepId()); + } + if (!other.getHandle().isEmpty()) { + handle_ = other.handle_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.MemoryLogStep parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.MemoryLogStep) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long stepId_ ; + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder setStepId(long value) { + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object handle_ = ""; + /** + *
+     * Handle describing the feeds and fetches of the step.
+     * 
+ * + * optional string handle = 2; + */ + public java.lang.String getHandle() { + java.lang.Object ref = handle_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + handle_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Handle describing the feeds and fetches of the step.
+     * 
+ * + * optional string handle = 2; + */ + public com.google.protobuf.ByteString + getHandleBytes() { + java.lang.Object ref = handle_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + handle_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Handle describing the feeds and fetches of the step.
+     * 
+ * + * optional string handle = 2; + */ + public Builder setHandle( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + handle_ = value; + onChanged(); + return this; + } + /** + *
+     * Handle describing the feeds and fetches of the step.
+     * 
+ * + * optional string handle = 2; + */ + public Builder clearHandle() { + + handle_ = getDefaultInstance().getHandle(); + onChanged(); + return this; + } + /** + *
+     * Handle describing the feeds and fetches of the step.
+     * 
+ * + * optional string handle = 2; + */ + public Builder setHandleBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + handle_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.MemoryLogStep) + } + + // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogStep) + private static final org.tensorflow.framework.MemoryLogStep DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.MemoryLogStep(); + } + + public static org.tensorflow.framework.MemoryLogStep getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MemoryLogStep parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MemoryLogStep(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.MemoryLogStep getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStepOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStepOrBuilder.java new file mode 100644 index 0000000000000..cf374ae233ffd --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogStepOrBuilder.java @@ -0,0 +1,36 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public interface MemoryLogStepOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.MemoryLogStep) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + long getStepId(); + + /** + *
+   * Handle describing the feeds and fetches of the step.
+   * 
+ * + * optional string handle = 2; + */ + java.lang.String getHandle(); + /** + *
+   * Handle describing the feeds and fetches of the step.
+   * 
+ * + * optional string handle = 2; + */ + com.google.protobuf.ByteString + getHandleBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocation.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocation.java new file mode 100644 index 0000000000000..47cdc2efe665c --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocation.java @@ -0,0 +1,833 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.MemoryLogTensorAllocation} + */ +public final class MemoryLogTensorAllocation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.MemoryLogTensorAllocation) + MemoryLogTensorAllocationOrBuilder { + // Use MemoryLogTensorAllocation.newBuilder() to construct. 
+ private MemoryLogTensorAllocation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private MemoryLogTensorAllocation() { + stepId_ = 0L; + kernelName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private MemoryLogTensorAllocation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + stepId_ = input.readInt64(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + kernelName_ = s; + break; + } + case 26: { + org.tensorflow.framework.TensorDescription.Builder subBuilder = null; + if (tensor_ != null) { + subBuilder = tensor_.toBuilder(); + } + tensor_ = input.readMessage(org.tensorflow.framework.TensorDescription.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tensor_); + tensor_ = subBuilder.buildPartial(); + } + + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorAllocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorAllocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogTensorAllocation.class, org.tensorflow.framework.MemoryLogTensorAllocation.Builder.class); + } + + public static final int STEP_ID_FIELD_NUMBER = 1; + private long stepId_; + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + + public static final int KERNEL_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object kernelName_; + /** + *
+   * Name of the kernel making the allocation as set in GraphDef,
+   * e.g., "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + public java.lang.String getKernelName() { + java.lang.Object ref = kernelName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + kernelName_ = s; + return s; + } + } + /** + *
+   * Name of the kernel making the allocation as set in GraphDef,
+   * e.g., "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + public com.google.protobuf.ByteString + getKernelNameBytes() { + java.lang.Object ref = kernelName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kernelName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TENSOR_FIELD_NUMBER = 3; + private org.tensorflow.framework.TensorDescription tensor_; + /** + *
+   * Allocated tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public boolean hasTensor() { + return tensor_ != null; + } + /** + *
+   * Allocated tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public org.tensorflow.framework.TensorDescription getTensor() { + return tensor_ == null ? org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensor_; + } + /** + *
+   * Allocated tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public org.tensorflow.framework.TensorDescriptionOrBuilder getTensorOrBuilder() { + return getTensor(); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stepId_ != 0L) { + output.writeInt64(1, stepId_); + } + if (!getKernelNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, kernelName_); + } + if (tensor_ != null) { + output.writeMessage(3, getTensor()); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stepId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, stepId_); + } + if (!getKernelNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, kernelName_); + } + if (tensor_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getTensor()); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.MemoryLogTensorAllocation)) { + return super.equals(obj); + } + org.tensorflow.framework.MemoryLogTensorAllocation other = (org.tensorflow.framework.MemoryLogTensorAllocation) obj; + + boolean result = true; + result = result && (getStepId() + == other.getStepId()); + result = result && getKernelName() + .equals(other.getKernelName()); + result = result && (hasTensor() == other.hasTensor()); + if (hasTensor()) { + result = result && getTensor() + .equals(other.getTensor()); + } + 
return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStepId()); + hash = (37 * hash) + KERNEL_NAME_FIELD_NUMBER; + hash = (53 * hash) + getKernelName().hashCode(); + if (hasTensor()) { + hash = (37 * hash) + TENSOR_FIELD_NUMBER; + hash = (53 * hash) + getTensor().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorAllocation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.MemoryLogTensorAllocation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.MemoryLogTensorAllocation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.MemoryLogTensorAllocation) + org.tensorflow.framework.MemoryLogTensorAllocationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorAllocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorAllocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogTensorAllocation.class, org.tensorflow.framework.MemoryLogTensorAllocation.Builder.class); + } + + // Construct using org.tensorflow.framework.MemoryLogTensorAllocation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + stepId_ = 0L; + + kernelName_ = ""; + + if (tensorBuilder_ == null) { + tensor_ = null; + } else { + tensor_ = null; + tensorBuilder_ = null; + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorAllocation_descriptor; + } + + public org.tensorflow.framework.MemoryLogTensorAllocation getDefaultInstanceForType() { + return org.tensorflow.framework.MemoryLogTensorAllocation.getDefaultInstance(); + } + + public org.tensorflow.framework.MemoryLogTensorAllocation build() { + org.tensorflow.framework.MemoryLogTensorAllocation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.MemoryLogTensorAllocation buildPartial() { + org.tensorflow.framework.MemoryLogTensorAllocation result = new org.tensorflow.framework.MemoryLogTensorAllocation(this); + result.stepId_ = stepId_; + result.kernelName_ = kernelName_; + if (tensorBuilder_ == null) { + result.tensor_ = tensor_; + } else { + result.tensor_ = tensorBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.MemoryLogTensorAllocation) { + return 
mergeFrom((org.tensorflow.framework.MemoryLogTensorAllocation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.MemoryLogTensorAllocation other) { + if (other == org.tensorflow.framework.MemoryLogTensorAllocation.getDefaultInstance()) return this; + if (other.getStepId() != 0L) { + setStepId(other.getStepId()); + } + if (!other.getKernelName().isEmpty()) { + kernelName_ = other.kernelName_; + onChanged(); + } + if (other.hasTensor()) { + mergeTensor(other.getTensor()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.MemoryLogTensorAllocation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.MemoryLogTensorAllocation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long stepId_ ; + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder setStepId(long value) { + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object kernelName_ = ""; + /** + *
+     * Name of the kernel making the allocation as set in GraphDef,
+     * e.g., "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public java.lang.String getKernelName() { + java.lang.Object ref = kernelName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + kernelName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the kernel making the allocation as set in GraphDef,
+     * e.g., "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public com.google.protobuf.ByteString + getKernelNameBytes() { + java.lang.Object ref = kernelName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kernelName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the kernel making the allocation as set in GraphDef,
+     * e.g., "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public Builder setKernelName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + kernelName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the kernel making the allocation as set in GraphDef,
+     * e.g., "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public Builder clearKernelName() { + + kernelName_ = getDefaultInstance().getKernelName(); + onChanged(); + return this; + } + /** + *
+     * Name of the kernel making the allocation as set in GraphDef,
+     * e.g., "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public Builder setKernelNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + kernelName_ = value; + onChanged(); + return this; + } + + private org.tensorflow.framework.TensorDescription tensor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder> tensorBuilder_; + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public boolean hasTensor() { + return tensorBuilder_ != null || tensor_ != null; + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public org.tensorflow.framework.TensorDescription getTensor() { + if (tensorBuilder_ == null) { + return tensor_ == null ? org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensor_; + } else { + return tensorBuilder_.getMessage(); + } + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public Builder setTensor(org.tensorflow.framework.TensorDescription value) { + if (tensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tensor_ = value; + onChanged(); + } else { + tensorBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public Builder setTensor( + org.tensorflow.framework.TensorDescription.Builder builderForValue) { + if (tensorBuilder_ == null) { + tensor_ = builderForValue.build(); + onChanged(); + } else { + tensorBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public Builder mergeTensor(org.tensorflow.framework.TensorDescription value) { + if (tensorBuilder_ == null) { + if (tensor_ != null) { + tensor_ = + org.tensorflow.framework.TensorDescription.newBuilder(tensor_).mergeFrom(value).buildPartial(); + } else { + tensor_ = value; + } + onChanged(); + } else { + tensorBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public Builder clearTensor() { + if (tensorBuilder_ == null) { + tensor_ = null; + onChanged(); + } else { + tensor_ = null; + tensorBuilder_ = null; + } + + return this; + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public org.tensorflow.framework.TensorDescription.Builder getTensorBuilder() { + + onChanged(); + return getTensorFieldBuilder().getBuilder(); + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + public org.tensorflow.framework.TensorDescriptionOrBuilder getTensorOrBuilder() { + if (tensorBuilder_ != null) { + return tensorBuilder_.getMessageOrBuilder(); + } else { + return tensor_ == null ? + org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensor_; + } + } + /** + *
+     * Allocated tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder> + getTensorFieldBuilder() { + if (tensorBuilder_ == null) { + tensorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder>( + getTensor(), + getParentForChildren(), + isClean()); + tensor_ = null; + } + return tensorBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.MemoryLogTensorAllocation) + } + + // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogTensorAllocation) + private static final org.tensorflow.framework.MemoryLogTensorAllocation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.MemoryLogTensorAllocation(); + } + + public static org.tensorflow.framework.MemoryLogTensorAllocation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MemoryLogTensorAllocation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MemoryLogTensorAllocation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.tensorflow.framework.MemoryLogTensorAllocation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocationOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocationOrBuilder.java new file mode 100644 index 0000000000000..8150cca536c2e --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorAllocationOrBuilder.java @@ -0,0 +1,63 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public interface MemoryLogTensorAllocationOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.MemoryLogTensorAllocation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + long getStepId(); + + /** + *
+   * Name of the kernel making the allocation as set in GraphDef,
+   * e.g., "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + java.lang.String getKernelName(); + /** + *
+   * Name of the kernel making the allocation as set in GraphDef,
+   * e.g., "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + com.google.protobuf.ByteString + getKernelNameBytes(); + + /** + *
+   * Allocated tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + boolean hasTensor(); + /** + *
+   * Allocated tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + org.tensorflow.framework.TensorDescription getTensor(); + /** + *
+   * Allocated tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 3; + */ + org.tensorflow.framework.TensorDescriptionOrBuilder getTensorOrBuilder(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocation.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocation.java new file mode 100644 index 0000000000000..cb91756885ba0 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocation.java @@ -0,0 +1,601 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.MemoryLogTensorDeallocation} + */ +public final class MemoryLogTensorDeallocation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.MemoryLogTensorDeallocation) + MemoryLogTensorDeallocationOrBuilder { + // Use MemoryLogTensorDeallocation.newBuilder() to construct. 
+ private MemoryLogTensorDeallocation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private MemoryLogTensorDeallocation() { + allocationId_ = 0L; + allocatorName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private MemoryLogTensorDeallocation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + allocationId_ = input.readInt64(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + allocatorName_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorDeallocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorDeallocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogTensorDeallocation.class, org.tensorflow.framework.MemoryLogTensorDeallocation.Builder.class); + } + + public static final int ALLOCATION_ID_FIELD_NUMBER = 1; + private long allocationId_; + 
/** + *
+   * Id of the tensor buffer being deallocated, used to match to a
+   * corresponding allocation.
+   * 
+ * + * optional int64 allocation_id = 1; + */ + public long getAllocationId() { + return allocationId_; + } + + public static final int ALLOCATOR_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object allocatorName_; + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 2; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } + } + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 2; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (allocationId_ != 0L) { + output.writeInt64(1, allocationId_); + } + if (!getAllocatorNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, allocatorName_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (allocationId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, allocationId_); + } + if (!getAllocatorNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, allocatorName_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.MemoryLogTensorDeallocation)) { + return super.equals(obj); + } + org.tensorflow.framework.MemoryLogTensorDeallocation other = (org.tensorflow.framework.MemoryLogTensorDeallocation) obj; + + boolean result = true; + result = result && (getAllocationId() + == other.getAllocationId()); + result = result && getAllocatorName() + .equals(other.getAllocatorName()); + return result; + } + + 
@java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + ALLOCATION_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllocationId()); + hash = (37 * hash) + ALLOCATOR_NAME_FIELD_NUMBER; + hash = (53 * hash) + getAllocatorName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorDeallocation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.MemoryLogTensorDeallocation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.MemoryLogTensorDeallocation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.MemoryLogTensorDeallocation) + org.tensorflow.framework.MemoryLogTensorDeallocationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorDeallocation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorDeallocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogTensorDeallocation.class, org.tensorflow.framework.MemoryLogTensorDeallocation.Builder.class); + } + + // Construct using org.tensorflow.framework.MemoryLogTensorDeallocation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + allocationId_ = 0L; + + allocatorName_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorDeallocation_descriptor; + } + + public org.tensorflow.framework.MemoryLogTensorDeallocation 
getDefaultInstanceForType() { + return org.tensorflow.framework.MemoryLogTensorDeallocation.getDefaultInstance(); + } + + public org.tensorflow.framework.MemoryLogTensorDeallocation build() { + org.tensorflow.framework.MemoryLogTensorDeallocation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.MemoryLogTensorDeallocation buildPartial() { + org.tensorflow.framework.MemoryLogTensorDeallocation result = new org.tensorflow.framework.MemoryLogTensorDeallocation(this); + result.allocationId_ = allocationId_; + result.allocatorName_ = allocatorName_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.MemoryLogTensorDeallocation) { + return mergeFrom((org.tensorflow.framework.MemoryLogTensorDeallocation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.MemoryLogTensorDeallocation other) { + if (other == 
org.tensorflow.framework.MemoryLogTensorDeallocation.getDefaultInstance()) return this; + if (other.getAllocationId() != 0L) { + setAllocationId(other.getAllocationId()); + } + if (!other.getAllocatorName().isEmpty()) { + allocatorName_ = other.allocatorName_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.MemoryLogTensorDeallocation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.MemoryLogTensorDeallocation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long allocationId_ ; + /** + *
+     * Id of the tensor buffer being deallocated, used to match to a
+     * corresponding allocation.
+     * 
+ * + * optional int64 allocation_id = 1; + */ + public long getAllocationId() { + return allocationId_; + } + /** + *
+     * Id of the tensor buffer being deallocated, used to match to a
+     * corresponding allocation.
+     * 
+ * + * optional int64 allocation_id = 1; + */ + public Builder setAllocationId(long value) { + + allocationId_ = value; + onChanged(); + return this; + } + /** + *
+     * Id of the tensor buffer being deallocated, used to match to a
+     * corresponding allocation.
+     * 
+ * + * optional int64 allocation_id = 1; + */ + public Builder clearAllocationId() { + + allocationId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object allocatorName_ = ""; + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 2; + */ + public java.lang.String getAllocatorName() { + java.lang.Object ref = allocatorName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + allocatorName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 2; + */ + public com.google.protobuf.ByteString + getAllocatorNameBytes() { + java.lang.Object ref = allocatorName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + allocatorName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 2; + */ + public Builder setAllocatorName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + allocatorName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 2; + */ + public Builder clearAllocatorName() { + + allocatorName_ = getDefaultInstance().getAllocatorName(); + onChanged(); + return this; + } + /** + *
+     * Name of the allocator used.
+     * 
+ * + * optional string allocator_name = 2; + */ + public Builder setAllocatorNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + allocatorName_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.MemoryLogTensorDeallocation) + } + + // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogTensorDeallocation) + private static final org.tensorflow.framework.MemoryLogTensorDeallocation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.MemoryLogTensorDeallocation(); + } + + public static org.tensorflow.framework.MemoryLogTensorDeallocation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MemoryLogTensorDeallocation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MemoryLogTensorDeallocation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.MemoryLogTensorDeallocation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocationOrBuilder.java 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocationOrBuilder.java new file mode 100644 index 0000000000000..8045970a421fc --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorDeallocationOrBuilder.java @@ -0,0 +1,37 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public interface MemoryLogTensorDeallocationOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.MemoryLogTensorDeallocation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Id of the tensor buffer being deallocated, used to match to a
+   * corresponding allocation.
+   * 
+ * + * optional int64 allocation_id = 1; + */ + long getAllocationId(); + + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 2; + */ + java.lang.String getAllocatorName(); + /** + *
+   * Name of the allocator used.
+   * 
+ * + * optional string allocator_name = 2; + */ + com.google.protobuf.ByteString + getAllocatorNameBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutput.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutput.java new file mode 100644 index 0000000000000..9ce3bc17d7b78 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutput.java @@ -0,0 +1,907 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.MemoryLogTensorOutput} + */ +public final class MemoryLogTensorOutput extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.MemoryLogTensorOutput) + MemoryLogTensorOutputOrBuilder { + // Use MemoryLogTensorOutput.newBuilder() to construct. 
+ private MemoryLogTensorOutput(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private MemoryLogTensorOutput() { + stepId_ = 0L; + kernelName_ = ""; + index_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private MemoryLogTensorOutput( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + stepId_ = input.readInt64(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + kernelName_ = s; + break; + } + case 24: { + + index_ = input.readInt32(); + break; + } + case 34: { + org.tensorflow.framework.TensorDescription.Builder subBuilder = null; + if (tensor_ != null) { + subBuilder = tensor_.toBuilder(); + } + tensor_ = input.readMessage(org.tensorflow.framework.TensorDescription.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tensor_); + tensor_ = subBuilder.buildPartial(); + } + + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorOutput_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorOutput_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogTensorOutput.class, org.tensorflow.framework.MemoryLogTensorOutput.Builder.class); + } + + public static final int STEP_ID_FIELD_NUMBER = 1; + private long stepId_; + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + + public static final int KERNEL_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object kernelName_; + /** + *
+   * Name of the kernel producing an output as set in GraphDef, e.g.,
+   * "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + public java.lang.String getKernelName() { + java.lang.Object ref = kernelName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + kernelName_ = s; + return s; + } + } + /** + *
+   * Name of the kernel producing an output as set in GraphDef, e.g.,
+   * "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + public com.google.protobuf.ByteString + getKernelNameBytes() { + java.lang.Object ref = kernelName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kernelName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INDEX_FIELD_NUMBER = 3; + private int index_; + /** + *
+   * Index of the output being set.
+   * 
+ * + * optional int32 index = 3; + */ + public int getIndex() { + return index_; + } + + public static final int TENSOR_FIELD_NUMBER = 4; + private org.tensorflow.framework.TensorDescription tensor_; + /** + *
+   * Output tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public boolean hasTensor() { + return tensor_ != null; + } + /** + *
+   * Output tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public org.tensorflow.framework.TensorDescription getTensor() { + return tensor_ == null ? org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensor_; + } + /** + *
+   * Output tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public org.tensorflow.framework.TensorDescriptionOrBuilder getTensorOrBuilder() { + return getTensor(); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stepId_ != 0L) { + output.writeInt64(1, stepId_); + } + if (!getKernelNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, kernelName_); + } + if (index_ != 0) { + output.writeInt32(3, index_); + } + if (tensor_ != null) { + output.writeMessage(4, getTensor()); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stepId_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, stepId_); + } + if (!getKernelNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, kernelName_); + } + if (index_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, index_); + } + if (tensor_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getTensor()); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.MemoryLogTensorOutput)) { + return super.equals(obj); + } + org.tensorflow.framework.MemoryLogTensorOutput other = (org.tensorflow.framework.MemoryLogTensorOutput) obj; + + boolean result = true; + result = result && (getStepId() + == other.getStepId()); + result = result && getKernelName() + .equals(other.getKernelName()); 
+ result = result && (getIndex() + == other.getIndex()); + result = result && (hasTensor() == other.hasTensor()); + if (hasTensor()) { + result = result && getTensor() + .equals(other.getTensor()); + } + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + STEP_ID_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStepId()); + hash = (37 * hash) + KERNEL_NAME_FIELD_NUMBER; + hash = (53 * hash) + getKernelName().hashCode(); + hash = (37 * hash) + INDEX_FIELD_NUMBER; + hash = (53 * hash) + getIndex(); + if (hasTensor()) { + hash = (37 * hash) + TENSOR_FIELD_NUMBER; + hash = (53 * hash) + getTensor().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom(java.io.InputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.MemoryLogTensorOutput parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.MemoryLogTensorOutput prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.MemoryLogTensorOutput} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.MemoryLogTensorOutput) + org.tensorflow.framework.MemoryLogTensorOutputOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorOutput_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorOutput_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.MemoryLogTensorOutput.class, org.tensorflow.framework.MemoryLogTensorOutput.Builder.class); + } + + // Construct using org.tensorflow.framework.MemoryLogTensorOutput.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + stepId_ = 0L; + + kernelName_ = ""; + + index_ = 0; + + if (tensorBuilder_ == null) { + tensor_ = null; + } else { + tensor_ = null; + tensorBuilder_ = null; + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.LogMemoryProtos.internal_static_tensorflow_MemoryLogTensorOutput_descriptor; + } + + 
public org.tensorflow.framework.MemoryLogTensorOutput getDefaultInstanceForType() { + return org.tensorflow.framework.MemoryLogTensorOutput.getDefaultInstance(); + } + + public org.tensorflow.framework.MemoryLogTensorOutput build() { + org.tensorflow.framework.MemoryLogTensorOutput result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.MemoryLogTensorOutput buildPartial() { + org.tensorflow.framework.MemoryLogTensorOutput result = new org.tensorflow.framework.MemoryLogTensorOutput(this); + result.stepId_ = stepId_; + result.kernelName_ = kernelName_; + result.index_ = index_; + if (tensorBuilder_ == null) { + result.tensor_ = tensor_; + } else { + result.tensor_ = tensorBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.MemoryLogTensorOutput) { + return mergeFrom((org.tensorflow.framework.MemoryLogTensorOutput)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.tensorflow.framework.MemoryLogTensorOutput other) { + if (other == org.tensorflow.framework.MemoryLogTensorOutput.getDefaultInstance()) return this; + if (other.getStepId() != 0L) { + setStepId(other.getStepId()); + } + if (!other.getKernelName().isEmpty()) { + kernelName_ = other.kernelName_; + onChanged(); + } + if (other.getIndex() != 0) { + setIndex(other.getIndex()); + } + if (other.hasTensor()) { + mergeTensor(other.getTensor()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.MemoryLogTensorOutput parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.MemoryLogTensorOutput) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private long stepId_ ; + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public long getStepId() { + return stepId_; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder setStepId(long value) { + + stepId_ = value; + onChanged(); + return this; + } + /** + *
+     * Process-unique step id.
+     * 
+ * + * optional int64 step_id = 1; + */ + public Builder clearStepId() { + + stepId_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object kernelName_ = ""; + /** + *
+     * Name of the kernel producing an output as set in GraphDef, e.g.,
+     * "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public java.lang.String getKernelName() { + java.lang.Object ref = kernelName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + kernelName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the kernel producing an output as set in GraphDef, e.g.,
+     * "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public com.google.protobuf.ByteString + getKernelNameBytes() { + java.lang.Object ref = kernelName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + kernelName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the kernel producing an output as set in GraphDef, e.g.,
+     * "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public Builder setKernelName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + kernelName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the kernel producing an output as set in GraphDef, e.g.,
+     * "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public Builder clearKernelName() { + + kernelName_ = getDefaultInstance().getKernelName(); + onChanged(); + return this; + } + /** + *
+     * Name of the kernel producing an output as set in GraphDef, e.g.,
+     * "affine2/weights/Assign".
+     * 
+ * + * optional string kernel_name = 2; + */ + public Builder setKernelNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + kernelName_ = value; + onChanged(); + return this; + } + + private int index_ ; + /** + *
+     * Index of the output being set.
+     * 
+ * + * optional int32 index = 3; + */ + public int getIndex() { + return index_; + } + /** + *
+     * Index of the output being set.
+     * 
+ * + * optional int32 index = 3; + */ + public Builder setIndex(int value) { + + index_ = value; + onChanged(); + return this; + } + /** + *
+     * Index of the output being set.
+     * 
+ * + * optional int32 index = 3; + */ + public Builder clearIndex() { + + index_ = 0; + onChanged(); + return this; + } + + private org.tensorflow.framework.TensorDescription tensor_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder> tensorBuilder_; + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public boolean hasTensor() { + return tensorBuilder_ != null || tensor_ != null; + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public org.tensorflow.framework.TensorDescription getTensor() { + if (tensorBuilder_ == null) { + return tensor_ == null ? org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensor_; + } else { + return tensorBuilder_.getMessage(); + } + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public Builder setTensor(org.tensorflow.framework.TensorDescription value) { + if (tensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tensor_ = value; + onChanged(); + } else { + tensorBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public Builder setTensor( + org.tensorflow.framework.TensorDescription.Builder builderForValue) { + if (tensorBuilder_ == null) { + tensor_ = builderForValue.build(); + onChanged(); + } else { + tensorBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public Builder mergeTensor(org.tensorflow.framework.TensorDescription value) { + if (tensorBuilder_ == null) { + if (tensor_ != null) { + tensor_ = + org.tensorflow.framework.TensorDescription.newBuilder(tensor_).mergeFrom(value).buildPartial(); + } else { + tensor_ = value; + } + onChanged(); + } else { + tensorBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public Builder clearTensor() { + if (tensorBuilder_ == null) { + tensor_ = null; + onChanged(); + } else { + tensor_ = null; + tensorBuilder_ = null; + } + + return this; + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public org.tensorflow.framework.TensorDescription.Builder getTensorBuilder() { + + onChanged(); + return getTensorFieldBuilder().getBuilder(); + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + public org.tensorflow.framework.TensorDescriptionOrBuilder getTensorOrBuilder() { + if (tensorBuilder_ != null) { + return tensorBuilder_.getMessageOrBuilder(); + } else { + return tensor_ == null ? + org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensor_; + } + } + /** + *
+     * Output tensor details.
+     * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder> + getTensorFieldBuilder() { + if (tensorBuilder_ == null) { + tensorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder>( + getTensor(), + getParentForChildren(), + isClean()); + tensor_ = null; + } + return tensorBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.MemoryLogTensorOutput) + } + + // @@protoc_insertion_point(class_scope:tensorflow.MemoryLogTensorOutput) + private static final org.tensorflow.framework.MemoryLogTensorOutput DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.MemoryLogTensorOutput(); + } + + public static org.tensorflow.framework.MemoryLogTensorOutput getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public MemoryLogTensorOutput parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MemoryLogTensorOutput(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.MemoryLogTensorOutput 
getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutputOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutputOrBuilder.java new file mode 100644 index 0000000000000..c1cfe0f657cf0 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/MemoryLogTensorOutputOrBuilder.java @@ -0,0 +1,72 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/log_memory.proto + +package org.tensorflow.framework; + +public interface MemoryLogTensorOutputOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.MemoryLogTensorOutput) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Process-unique step id.
+   * 
+ * + * optional int64 step_id = 1; + */ + long getStepId(); + + /** + *
+   * Name of the kernel producing an output as set in GraphDef, e.g.,
+   * "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + java.lang.String getKernelName(); + /** + *
+   * Name of the kernel producing an output as set in GraphDef, e.g.,
+   * "affine2/weights/Assign".
+   * 
+ * + * optional string kernel_name = 2; + */ + com.google.protobuf.ByteString + getKernelNameBytes(); + + /** + *
+   * Index of the output being set.
+   * 
+ * + * optional int32 index = 3; + */ + int getIndex(); + + /** + *
+   * Output tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + boolean hasTensor(); + /** + *
+   * Output tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + org.tensorflow.framework.TensorDescription getTensor(); + /** + *
+   * Output tensor details.
+   * 
+ * + * optional .tensorflow.TensorDescription tensor = 4; + */ + org.tensorflow.framework.TensorDescriptionOrBuilder getTensorOrBuilder(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrList.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrList.java new file mode 100644 index 0000000000000..26071d0794a75 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrList.java @@ -0,0 +1,777 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/attr_value.proto + +package org.tensorflow.framework; + +/** + *
+ * A list of attr names and their values. The whole list is attached
+ * with a string name.  E.g., MatMul[T=float].
+ * 
+ * + * Protobuf type {@code tensorflow.NameAttrList} + */ +public final class NameAttrList extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.NameAttrList) + NameAttrListOrBuilder { + // Use NameAttrList.newBuilder() to construct. + private NameAttrList(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private NameAttrList() { + name_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private NameAttrList( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000002; + } + com.google.protobuf.MapEntry + attr__ = input.readMessage( + AttrDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + attr_.getMutableMap().put( + attr__.getKey(), attr__.getValue()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_NameAttrList_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 2: + return internalGetAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_NameAttrList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NameAttrList.class, org.tensorflow.framework.NameAttrList.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ATTR_FIELD_NUMBER = 2; + private static final class AttrDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.tensorflow.framework.AttrValue> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_NameAttrList_AttrEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + 
com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.tensorflow.framework.AttrValue.getDefaultInstance()); + } + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? 
map.get(key) : defaultValue; + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetAttr(), + AttrDefaultEntryHolder.defaultEntry, + 2); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + for (java.util.Map.Entry entry + : internalGetAttr().getMap().entrySet()) { + com.google.protobuf.MapEntry + attr__ = AttrDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, attr__); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.NameAttrList)) { + return super.equals(obj); + } + org.tensorflow.framework.NameAttrList other = (org.tensorflow.framework.NameAttrList) obj; + + boolean 
result = true; + result = result && getName() + .equals(other.getName()); + result = result && internalGetAttr().equals( + other.internalGetAttr()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (!internalGetAttr().getMap().isEmpty()) { + hash = (37 * hash) + ATTR_FIELD_NUMBER; + hash = (53 * hash) + internalGetAttr().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.NameAttrList parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NameAttrList parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NameAttrList parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NameAttrList parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NameAttrList parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NameAttrList parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NameAttrList parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NameAttrList parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NameAttrList parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NameAttrList parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.NameAttrList prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A list of attr names and their values. The whole list is attached
+   * with a string name.  E.g., MatMul[T=float].
+   * 
+ * + * Protobuf type {@code tensorflow.NameAttrList} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.NameAttrList) + org.tensorflow.framework.NameAttrListOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_NameAttrList_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 2: + return internalGetAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 2: + return internalGetMutableAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_NameAttrList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NameAttrList.class, org.tensorflow.framework.NameAttrList.Builder.class); + } + + // Construct using org.tensorflow.framework.NameAttrList.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + internalGetMutableAttr().clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.framework.AttrValueProtos.internal_static_tensorflow_NameAttrList_descriptor; + } + + public org.tensorflow.framework.NameAttrList getDefaultInstanceForType() { + return org.tensorflow.framework.NameAttrList.getDefaultInstance(); + } + + public org.tensorflow.framework.NameAttrList build() { + org.tensorflow.framework.NameAttrList result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.NameAttrList buildPartial() { + org.tensorflow.framework.NameAttrList result = new org.tensorflow.framework.NameAttrList(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + result.attr_ = internalGetAttr(); + result.attr_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.NameAttrList) { + return mergeFrom((org.tensorflow.framework.NameAttrList)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.tensorflow.framework.NameAttrList other) { + if (other == org.tensorflow.framework.NameAttrList.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + internalGetMutableAttr().mergeFrom( + other.internalGetAttr()); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.NameAttrList parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.NameAttrList) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * optional string 
name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + private com.google.protobuf.MapField + internalGetMutableAttr() { + onChanged();; + if (attr_ == null) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + } + if (!attr_.isMutable()) { + attr_ = attr_.copy(); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? 
map.get(key) : defaultValue; + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearAttr() { + getMutableAttr().clear(); + return this; + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public Builder removeAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableAttr() { + return internalGetMutableAttr().getMutableMap(); + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + public Builder putAttr( + java.lang.String key, + org.tensorflow.framework.AttrValue value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().put(key, value); + return this; + } + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + public Builder putAllAttr( + java.util.Map values) { + getMutableAttr().putAll(values); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.NameAttrList) + } + + // @@protoc_insertion_point(class_scope:tensorflow.NameAttrList) + private static final org.tensorflow.framework.NameAttrList DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.NameAttrList(); + } + + public static 
org.tensorflow.framework.NameAttrList getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NameAttrList parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NameAttrList(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.NameAttrList getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrListOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrListOrBuilder.java new file mode 100644 index 0000000000000..110c793a89b4d --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NameAttrListOrBuilder.java @@ -0,0 +1,53 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/attr_value.proto + +package org.tensorflow.framework; + +public interface NameAttrListOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.NameAttrList) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string name = 1; + */ + java.lang.String getName(); + /** + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + int getAttrCount(); + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + boolean containsAttr( + java.lang.String key); + /** + * Use {@link #getAttrMap()} instead. 
+ */ + @java.lang.Deprecated + java.util.Map + getAttr(); + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + java.util.Map + getAttrMap(); + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue); + /** + * map<string, .tensorflow.AttrValue> attr = 2; + */ + + org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDef.java new file mode 100644 index 0000000000000..3e3383817ea3d --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDef.java @@ -0,0 +1,1684 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/node_def.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.NodeDef} + */ +public final class NodeDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.NodeDef) + NodeDefOrBuilder { + // Use NodeDef.newBuilder() to construct. 
+ private NodeDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private NodeDef() { + name_ = ""; + op_ = ""; + input_ = com.google.protobuf.LazyStringArrayList.EMPTY; + device_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private NodeDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + op_ = s; + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + input_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; + } + input_.add(s); + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + device_ = s; + break; + } + case 42: { + if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000010; + } + com.google.protobuf.MapEntry + attr__ = input.readMessage( + AttrDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + attr_.getMutableMap().put( + attr__.getKey(), attr__.getValue()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + input_ = input_.getUnmodifiableView(); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.NodeProto.internal_static_tensorflow_NodeDef_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 5: + return internalGetAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.NodeProto.internal_static_tensorflow_NodeDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NodeDef.class, org.tensorflow.framework.NodeDef.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * The name given to this operator. Used for naming inputs,
+   * logging, visualization, etc.  Unique within a single GraphDef.
+   * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+   * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * The name given to this operator. Used for naming inputs,
+   * logging, visualization, etc.  Unique within a single GraphDef.
+   * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+   * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OP_FIELD_NUMBER = 2; + private volatile java.lang.Object op_; + /** + *
+   * The operation name.  There may be custom parameters in attrs.
+   * Op names starting with an underscore are reserved for internal use.
+   * 
+ * + * optional string op = 2; + */ + public java.lang.String getOp() { + java.lang.Object ref = op_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + op_ = s; + return s; + } + } + /** + *
+   * The operation name.  There may be custom parameters in attrs.
+   * Op names starting with an underscore are reserved for internal use.
+   * 
+ * + * optional string op = 2; + */ + public com.google.protobuf.ByteString + getOpBytes() { + java.lang.Object ref = op_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + op_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INPUT_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList input_; + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + public com.google.protobuf.ProtocolStringList + getInputList() { + return input_; + } + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + public int getInputCount() { + return input_.size(); + } + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + public java.lang.String getInput(int index) { + return input_.get(index); + } + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + public com.google.protobuf.ByteString + getInputBytes(int index) { + return input_.getByteString(index); + } + + public static final int DEVICE_FIELD_NUMBER = 4; + private volatile java.lang.Object device_; + /** + *
+   * A (possibly partial) specification for the device on which this
+   * node should be placed.
+   * The expected syntax for this string is as follows:
+   * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+   * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+   * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+   * CONSTRAINT ::= ("job:" JOB_NAME)
+   *              | ("replica:" [1-9][0-9]*)
+   *              | ("task:" [1-9][0-9]*)
+   *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+   * Valid values for this string include:
+   * * "@other/node"                         (colocate with "other/node")
+   * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+   * * "/job:worker/gpu:3"                   (partial specification)
+   * * ""                                    (no specification)
+   * If the constraints do not resolve to a single device (or if this
+   * field is empty or not present), the runtime will attempt to
+   * choose a device automatically.
+   * 
+ * + * optional string device = 4; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } + } + /** + *
+   * A (possibly partial) specification for the device on which this
+   * node should be placed.
+   * The expected syntax for this string is as follows:
+   * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+   * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+   * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+   * CONSTRAINT ::= ("job:" JOB_NAME)
+   *              | ("replica:" [1-9][0-9]*)
+   *              | ("task:" [1-9][0-9]*)
+   *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+   * Valid values for this string include:
+   * * "@other/node"                         (colocate with "other/node")
+   * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+   * * "/job:worker/gpu:3"                   (partial specification)
+   * * ""                                    (no specification)
+   * If the constraints do not resolve to a single device (or if this
+   * field is empty or not present), the runtime will attempt to
+   * choose a device automatically.
+   * 
+ * + * optional string device = 4; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ATTR_FIELD_NUMBER = 5; + private static final class AttrDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.tensorflow.framework.AttrValue> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.tensorflow.framework.NodeProto.internal_static_tensorflow_NodeDef_AttrEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.tensorflow.framework.AttrValue.getDefaultInstance()); + } + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getOpBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, op_); + } + for (int i = 0; i < input_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, input_.getRaw(i)); + } + if (!getDeviceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, device_); + } + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetAttr(), + AttrDefaultEntryHolder.defaultEntry, + 5); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (!getOpBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, op_); + } + { + int dataSize = 0; + for (int i = 0; i < input_.size(); i++) { + dataSize += computeStringSizeNoTag(input_.getRaw(i)); + } + size += dataSize; + size += 1 * getInputList().size(); + } + if (!getDeviceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, device_); + } + 
for (java.util.Map.Entry entry + : internalGetAttr().getMap().entrySet()) { + com.google.protobuf.MapEntry + attr__ = AttrDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, attr__); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.NodeDef)) { + return super.equals(obj); + } + org.tensorflow.framework.NodeDef other = (org.tensorflow.framework.NodeDef) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getOp() + .equals(other.getOp()); + result = result && getInputList() + .equals(other.getInputList()); + result = result && getDevice() + .equals(other.getDevice()); + result = result && internalGetAttr().equals( + other.internalGetAttr()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + OP_FIELD_NUMBER; + hash = (53 * hash) + getOp().hashCode(); + if (getInputCount() > 0) { + hash = (37 * hash) + INPUT_FIELD_NUMBER; + hash = (53 * hash) + getInputList().hashCode(); + } + hash = (37 * hash) + DEVICE_FIELD_NUMBER; + hash = (53 * hash) + getDevice().hashCode(); + if (!internalGetAttr().getMap().isEmpty()) { + hash = (37 * hash) + ATTR_FIELD_NUMBER; + hash = (53 * hash) + internalGetAttr().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.NodeDef parseFrom( + com.google.protobuf.ByteString data) 
+ throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NodeDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NodeDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NodeDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NodeDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NodeDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NodeDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.NodeDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.NodeDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.NodeDef) + org.tensorflow.framework.NodeDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.NodeProto.internal_static_tensorflow_NodeDef_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 5: + return internalGetAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 5: + return internalGetMutableAttr(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.NodeProto.internal_static_tensorflow_NodeDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NodeDef.class, org.tensorflow.framework.NodeDef.Builder.class); + } + + // Construct using org.tensorflow.framework.NodeDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + op_ = ""; + + input_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + device_ = ""; + + internalGetMutableAttr().clear(); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.NodeProto.internal_static_tensorflow_NodeDef_descriptor; + } + + public org.tensorflow.framework.NodeDef getDefaultInstanceForType() { + return org.tensorflow.framework.NodeDef.getDefaultInstance(); + } + + public org.tensorflow.framework.NodeDef build() { + org.tensorflow.framework.NodeDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.NodeDef buildPartial() { + org.tensorflow.framework.NodeDef result = new org.tensorflow.framework.NodeDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + result.op_ = op_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + input_ = input_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.input_ = input_; + result.device_ = device_; + result.attr_ = 
internalGetAttr(); + result.attr_.makeImmutable(); + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.NodeDef) { + return mergeFrom((org.tensorflow.framework.NodeDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.NodeDef other) { + if (other == org.tensorflow.framework.NodeDef.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getOp().isEmpty()) { + op_ = other.op_; + onChanged(); + } + if (!other.input_.isEmpty()) { + if (input_.isEmpty()) { + input_ = other.input_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureInputIsMutable(); + input_.addAll(other.input_); + } + onChanged(); + } + if (!other.getDevice().isEmpty()) { + device_ = other.device_; + onChanged(); + } + internalGetMutableAttr().mergeFrom( + other.internalGetAttr()); + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public 
Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.NodeDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.NodeDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+     * The name given to this operator. Used for naming inputs,
+     * logging, visualization, etc.  Unique within a single GraphDef.
+     * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The name given to this operator. Used for naming inputs,
+     * logging, visualization, etc.  Unique within a single GraphDef.
+     * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The name given to this operator. Used for naming inputs,
+     * logging, visualization, etc.  Unique within a single GraphDef.
+     * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+     * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * The name given to this operator. Used for naming inputs,
+     * logging, visualization, etc.  Unique within a single GraphDef.
+     * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+     * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * The name given to this operator. Used for naming inputs,
+     * logging, visualization, etc.  Unique within a single GraphDef.
+     * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+     * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object op_ = ""; + /** + *
+     * The operation name.  There may be custom parameters in attrs.
+     * Op names starting with an underscore are reserved for internal use.
+     * 
+ * + * optional string op = 2; + */ + public java.lang.String getOp() { + java.lang.Object ref = op_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + op_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * The operation name.  There may be custom parameters in attrs.
+     * Op names starting with an underscore are reserved for internal use.
+     * 
+ * + * optional string op = 2; + */ + public com.google.protobuf.ByteString + getOpBytes() { + java.lang.Object ref = op_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + op_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * The operation name.  There may be custom parameters in attrs.
+     * Op names starting with an underscore are reserved for internal use.
+     * 
+ * + * optional string op = 2; + */ + public Builder setOp( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + op_ = value; + onChanged(); + return this; + } + /** + *
+     * The operation name.  There may be custom parameters in attrs.
+     * Op names starting with an underscore are reserved for internal use.
+     * 
+ * + * optional string op = 2; + */ + public Builder clearOp() { + + op_ = getDefaultInstance().getOp(); + onChanged(); + return this; + } + /** + *
+     * The operation name.  There may be custom parameters in attrs.
+     * Op names starting with an underscore are reserved for internal use.
+     * 
+ * + * optional string op = 2; + */ + public Builder setOpBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + op_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList input_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureInputIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + input_ = new com.google.protobuf.LazyStringArrayList(input_); + bitField0_ |= 0x00000004; + } + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public com.google.protobuf.ProtocolStringList + getInputList() { + return input_.getUnmodifiableView(); + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public int getInputCount() { + return input_.size(); + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public java.lang.String getInput(int index) { + return input_.get(index); + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public com.google.protobuf.ByteString + getInputBytes(int index) { + return input_.getByteString(index); + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public Builder setInput( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputIsMutable(); + input_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public Builder addInput( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputIsMutable(); + input_.add(value); + onChanged(); + return this; + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public Builder addAllInput( + java.lang.Iterable values) { + ensureInputIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, input_); + onChanged(); + return this; + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public Builder clearInput() { + input_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+     * Each input is "node:src_output" with "node" being a string name and
+     * "src_output" indicating which output tensor to use from "node". If
+     * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+     * may optionally be followed by control inputs that have the format
+     * "^node".
+     * 
+ * + * repeated string input = 3; + */ + public Builder addInputBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureInputIsMutable(); + input_.add(value); + onChanged(); + return this; + } + + private java.lang.Object device_ = ""; + /** + *
+     * A (possibly partial) specification for the device on which this
+     * node should be placed.
+     * The expected syntax for this string is as follows:
+     * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+     * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+     * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+     * CONSTRAINT ::= ("job:" JOB_NAME)
+     *              | ("replica:" [1-9][0-9]*)
+     *              | ("task:" [1-9][0-9]*)
+     *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+     * Valid values for this string include:
+     * * "@other/node"                         (colocate with "other/node")
+     * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+     * * "/job:worker/gpu:3"                   (partial specification)
+     * * ""                                    (no specification)
+     * If the constraints do not resolve to a single device (or if this
+     * field is empty or not present), the runtime will attempt to
+     * choose a device automatically.
+     * 
+ * + * optional string device = 4; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * A (possibly partial) specification for the device on which this
+     * node should be placed.
+     * The expected syntax for this string is as follows:
+     * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+     * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+     * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+     * CONSTRAINT ::= ("job:" JOB_NAME)
+     *              | ("replica:" [1-9][0-9]*)
+     *              | ("task:" [1-9][0-9]*)
+     *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+     * Valid values for this string include:
+     * * "@other/node"                         (colocate with "other/node")
+     * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+     * * "/job:worker/gpu:3"                   (partial specification)
+     * * ""                                    (no specification)
+     * If the constraints do not resolve to a single device (or if this
+     * field is empty or not present), the runtime will attempt to
+     * choose a device automatically.
+     * 
+ * + * optional string device = 4; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * A (possibly partial) specification for the device on which this
+     * node should be placed.
+     * The expected syntax for this string is as follows:
+     * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+     * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+     * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+     * CONSTRAINT ::= ("job:" JOB_NAME)
+     *              | ("replica:" [1-9][0-9]*)
+     *              | ("task:" [1-9][0-9]*)
+     *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+     * Valid values for this string include:
+     * * "@other/node"                         (colocate with "other/node")
+     * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+     * * "/job:worker/gpu:3"                   (partial specification)
+     * * ""                                    (no specification)
+     * If the constraints do not resolve to a single device (or if this
+     * field is empty or not present), the runtime will attempt to
+     * choose a device automatically.
+     * 
+ * + * optional string device = 4; + */ + public Builder setDevice( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + device_ = value; + onChanged(); + return this; + } + /** + *
+     * A (possibly partial) specification for the device on which this
+     * node should be placed.
+     * The expected syntax for this string is as follows:
+     * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+     * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+     * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+     * CONSTRAINT ::= ("job:" JOB_NAME)
+     *              | ("replica:" [1-9][0-9]*)
+     *              | ("task:" [1-9][0-9]*)
+     *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+     * Valid values for this string include:
+     * * "@other/node"                         (colocate with "other/node")
+     * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+     * * "/job:worker/gpu:3"                   (partial specification)
+     * * ""                                    (no specification)
+     * If the constraints do not resolve to a single device (or if this
+     * field is empty or not present), the runtime will attempt to
+     * choose a device automatically.
+     * 
+ * + * optional string device = 4; + */ + public Builder clearDevice() { + + device_ = getDefaultInstance().getDevice(); + onChanged(); + return this; + } + /** + *
+     * A (possibly partial) specification for the device on which this
+     * node should be placed.
+     * The expected syntax for this string is as follows:
+     * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+     * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+     * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+     * CONSTRAINT ::= ("job:" JOB_NAME)
+     *              | ("replica:" [1-9][0-9]*)
+     *              | ("task:" [1-9][0-9]*)
+     *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+     * Valid values for this string include:
+     * * "@other/node"                         (colocate with "other/node")
+     * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+     * * "/job:worker/gpu:3"                   (partial specification)
+     * * ""                                    (no specification)
+     * If the constraints do not resolve to a single device (or if this
+     * field is empty or not present), the runtime will attempt to
+     * choose a device automatically.
+     * 
+ * + * optional string device = 4; + */ + public Builder setDeviceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + device_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, org.tensorflow.framework.AttrValue> attr_; + private com.google.protobuf.MapField + internalGetAttr() { + if (attr_ == null) { + return com.google.protobuf.MapField.emptyMapField( + AttrDefaultEntryHolder.defaultEntry); + } + return attr_; + } + private com.google.protobuf.MapField + internalGetMutableAttr() { + onChanged();; + if (attr_ == null) { + attr_ = com.google.protobuf.MapField.newMapField( + AttrDefaultEntryHolder.defaultEntry); + } + if (!attr_.isMutable()) { + attr_ = attr_.copy(); + } + return attr_; + } + + public int getAttrCount() { + return internalGetAttr().getMap().size(); + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public boolean containsAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + return internalGetAttr().getMap().containsKey(key); + } + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + public java.util.Map getAttr() { + return getAttrMap(); + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public java.util.Map getAttrMap() { + return internalGetAttr().getMap(); + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + java.util.Map map = + internalGetAttr().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearAttr() { + getMutableAttr().clear(); + return this; + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public Builder removeAttr( + java.lang.String key) { + if (key == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableAttr() { + return internalGetMutableAttr().getMutableMap(); + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + public Builder putAttr( + java.lang.String key, + org.tensorflow.framework.AttrValue value) { + if (key == null) { throw new java.lang.NullPointerException(); } + if (value == null) { throw new java.lang.NullPointerException(); } + getMutableAttr().put(key, value); + return this; + } + /** + *
+     * Operation-specific graph-construction-time configuration.
+     * Note that this should include all attrs defined in the
+     * corresponding OpDef, including those with a value matching
+     * the default -- this allows the default to change and makes
+     * NodeDefs easier to interpret on their own.  However, if
+     * an attr with a default is not specified in this list, the
+     * default will be used.
+     * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+     * one of the names from the corresponding OpDef's attr field).
+     * The values must have a type matching the corresponding OpDef
+     * attr's type field.
+     * TODO(josh11b): Add some examples here showing best practices.
+     * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + public Builder putAllAttr( + java.util.Map values) { + getMutableAttr().putAll(values); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.NodeDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.NodeDef) + private static final org.tensorflow.framework.NodeDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.NodeDef(); + } + + public static org.tensorflow.framework.NodeDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NodeDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NodeDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.NodeDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDefOrBuilder.java new file mode 100644 index 0000000000000..cd677fac3c85f --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeDefOrBuilder.java @@ -0,0 +1,263 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/node_def.proto + +package org.tensorflow.framework; + +public interface NodeDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.NodeDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * The name given to this operator. Used for naming inputs,
+   * logging, visualization, etc.  Unique within a single GraphDef.
+   * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+   * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+   * The name given to this operator. Used for naming inputs,
+   * logging, visualization, etc.  Unique within a single GraphDef.
+   * Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_./]*".
+   * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * The operation name.  There may be custom parameters in attrs.
+   * Op names starting with an underscore are reserved for internal use.
+   * 
+ * + * optional string op = 2; + */ + java.lang.String getOp(); + /** + *
+   * The operation name.  There may be custom parameters in attrs.
+   * Op names starting with an underscore are reserved for internal use.
+   * 
+ * + * optional string op = 2; + */ + com.google.protobuf.ByteString + getOpBytes(); + + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + java.util.List + getInputList(); + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + int getInputCount(); + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + java.lang.String getInput(int index); + /** + *
+   * Each input is "node:src_output" with "node" being a string name and
+   * "src_output" indicating which output tensor to use from "node". If
+   * "src_output" is 0 the ":0" suffix can be omitted.  Regular inputs
+   * may optionally be followed by control inputs that have the format
+   * "^node".
+   * 
+ * + * repeated string input = 3; + */ + com.google.protobuf.ByteString + getInputBytes(int index); + + /** + *
+   * A (possibly partial) specification for the device on which this
+   * node should be placed.
+   * The expected syntax for this string is as follows:
+   * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+   * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+   * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+   * CONSTRAINT ::= ("job:" JOB_NAME)
+   *              | ("replica:" [1-9][0-9]*)
+   *              | ("task:" [1-9][0-9]*)
+   *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+   * Valid values for this string include:
+   * * "@other/node"                         (colocate with "other/node")
+   * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+   * * "/job:worker/gpu:3"                   (partial specification)
+   * * ""                                    (no specification)
+   * If the constraints do not resolve to a single device (or if this
+   * field is empty or not present), the runtime will attempt to
+   * choose a device automatically.
+   * 
+ * + * optional string device = 4; + */ + java.lang.String getDevice(); + /** + *
+   * A (possibly partial) specification for the device on which this
+   * node should be placed.
+   * The expected syntax for this string is as follows:
+   * DEVICE_SPEC ::= COLOCATED_NODE | PARTIAL_SPEC
+   * COLOCATED_NODE ::= "@" NODE_NAME  // See NodeDef.name above.
+   * PARTIAL_SPEC ::= ("/" CONSTRAINT) *
+   * CONSTRAINT ::= ("job:" JOB_NAME)
+   *              | ("replica:" [1-9][0-9]*)
+   *              | ("task:" [1-9][0-9]*)
+   *              | ( ("gpu" | "cpu") ":" ([1-9][0-9]* | "*") )
+   * Valid values for this string include:
+   * * "@other/node"                         (colocate with "other/node")
+   * * "/job:worker/replica:0/task:1/gpu:3"  (full specification)
+   * * "/job:worker/gpu:3"                   (partial specification)
+   * * ""                                    (no specification)
+   * If the constraints do not resolve to a single device (or if this
+   * field is empty or not present), the runtime will attempt to
+   * choose a device automatically.
+   * 
+ * + * optional string device = 4; + */ + com.google.protobuf.ByteString + getDeviceBytes(); + + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + int getAttrCount(); + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + boolean containsAttr( + java.lang.String key); + /** + * Use {@link #getAttrMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getAttr(); + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + java.util.Map + getAttrMap(); + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + org.tensorflow.framework.AttrValue getAttrOrDefault( + java.lang.String key, + org.tensorflow.framework.AttrValue defaultValue); + /** + *
+   * Operation-specific graph-construction-time configuration.
+   * Note that this should include all attrs defined in the
+   * corresponding OpDef, including those with a value matching
+   * the default -- this allows the default to change and makes
+   * NodeDefs easier to interpret on their own.  However, if
+   * an attr with a default is not specified in this list, the
+   * default will be used.
+   * The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
+   * one of the names from the corresponding OpDef's attr field).
+   * The values must have a type matching the corresponding OpDef
+   * attr's type field.
+   * TODO(josh11b): Add some examples here showing best practices.
+   * 
+ * + * map<string, .tensorflow.AttrValue> attr = 5; + */ + + org.tensorflow.framework.AttrValue getAttrOrThrow( + java.lang.String key); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStats.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStats.java new file mode 100644 index 0000000000000..7a57c3f097372 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStats.java @@ -0,0 +1,2065 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +/** + *
+ * Time/size stats recorded for a single execution of a graph node.
+ * 
+ * + * Protobuf type {@code tensorflow.NodeExecStats} + */ +public final class NodeExecStats extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.NodeExecStats) + NodeExecStatsOrBuilder { + // Use NodeExecStats.newBuilder() to construct. + private NodeExecStats(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private NodeExecStats() { + nodeName_ = ""; + allStartMicros_ = 0L; + opStartRelMicros_ = 0L; + opEndRelMicros_ = 0L; + allEndRelMicros_ = 0L; + memory_ = java.util.Collections.emptyList(); + output_ = java.util.Collections.emptyList(); + timelineLabel_ = ""; + scheduledMicros_ = 0L; + threadId_ = 0; + referencedTensor_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private NodeExecStats( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + nodeName_ = s; + break; + } + case 16: { + + allStartMicros_ = input.readInt64(); + break; + } + case 24: { + + opStartRelMicros_ = input.readInt64(); + break; + } + case 32: { + + opEndRelMicros_ = input.readInt64(); + break; + } + case 40: { + + allEndRelMicros_ = input.readInt64(); + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + memory_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; + } + memory_.add( + input.readMessage(org.tensorflow.framework.AllocatorMemoryUsed.parser(), 
extensionRegistry)); + break; + } + case 58: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + output_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + output_.add( + input.readMessage(org.tensorflow.framework.NodeOutput.parser(), extensionRegistry)); + break; + } + case 66: { + java.lang.String s = input.readStringRequireUtf8(); + + timelineLabel_ = s; + break; + } + case 72: { + + scheduledMicros_ = input.readInt64(); + break; + } + case 80: { + + threadId_ = input.readUInt32(); + break; + } + case 90: { + if (!((mutable_bitField0_ & 0x00000400) == 0x00000400)) { + referencedTensor_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000400; + } + referencedTensor_.add( + input.readMessage(org.tensorflow.framework.AllocationDescription.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + memory_ = java.util.Collections.unmodifiableList(memory_); + } + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + output_ = java.util.Collections.unmodifiableList(output_); + } + if (((mutable_bitField0_ & 0x00000400) == 0x00000400)) { + referencedTensor_ = java.util.Collections.unmodifiableList(referencedTensor_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeExecStats_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeExecStats_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.tensorflow.framework.NodeExecStats.class, org.tensorflow.framework.NodeExecStats.Builder.class); + } + + private int bitField0_; + public static final int NODE_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object nodeName_; + /** + *
+   * TODO(tucker): Use some more compact form of node identity than
+   * the full string name.  Either all processes should agree on a
+   * global id (cost_id?) for each node, or we should use a hash of
+   * the name.
+   * 
+ * + * optional string node_name = 1; + */ + public java.lang.String getNodeName() { + java.lang.Object ref = nodeName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nodeName_ = s; + return s; + } + } + /** + *
+   * TODO(tucker): Use some more compact form of node identity than
+   * the full string name.  Either all processes should agree on a
+   * global id (cost_id?) for each node, or we should use a hash of
+   * the name.
+   * 
+ * + * optional string node_name = 1; + */ + public com.google.protobuf.ByteString + getNodeNameBytes() { + java.lang.Object ref = nodeName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + nodeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ALL_START_MICROS_FIELD_NUMBER = 2; + private long allStartMicros_; + /** + * optional int64 all_start_micros = 2; + */ + public long getAllStartMicros() { + return allStartMicros_; + } + + public static final int OP_START_REL_MICROS_FIELD_NUMBER = 3; + private long opStartRelMicros_; + /** + * optional int64 op_start_rel_micros = 3; + */ + public long getOpStartRelMicros() { + return opStartRelMicros_; + } + + public static final int OP_END_REL_MICROS_FIELD_NUMBER = 4; + private long opEndRelMicros_; + /** + * optional int64 op_end_rel_micros = 4; + */ + public long getOpEndRelMicros() { + return opEndRelMicros_; + } + + public static final int ALL_END_REL_MICROS_FIELD_NUMBER = 5; + private long allEndRelMicros_; + /** + * optional int64 all_end_rel_micros = 5; + */ + public long getAllEndRelMicros() { + return allEndRelMicros_; + } + + public static final int MEMORY_FIELD_NUMBER = 6; + private java.util.List memory_; + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public java.util.List getMemoryList() { + return memory_; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public java.util.List + getMemoryOrBuilderList() { + return memory_; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public int getMemoryCount() { + return memory_.size(); + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public org.tensorflow.framework.AllocatorMemoryUsed getMemory(int index) { + return memory_.get(index); + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ 
+ public org.tensorflow.framework.AllocatorMemoryUsedOrBuilder getMemoryOrBuilder( + int index) { + return memory_.get(index); + } + + public static final int OUTPUT_FIELD_NUMBER = 7; + private java.util.List output_; + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public java.util.List getOutputList() { + return output_; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public java.util.List + getOutputOrBuilderList() { + return output_; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public int getOutputCount() { + return output_.size(); + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutput getOutput(int index) { + return output_.get(index); + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutputOrBuilder getOutputOrBuilder( + int index) { + return output_.get(index); + } + + public static final int TIMELINE_LABEL_FIELD_NUMBER = 8; + private volatile java.lang.Object timelineLabel_; + /** + * optional string timeline_label = 8; + */ + public java.lang.String getTimelineLabel() { + java.lang.Object ref = timelineLabel_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + timelineLabel_ = s; + return s; + } + } + /** + * optional string timeline_label = 8; + */ + public com.google.protobuf.ByteString + getTimelineLabelBytes() { + java.lang.Object ref = timelineLabel_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + timelineLabel_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int SCHEDULED_MICROS_FIELD_NUMBER = 9; + private long scheduledMicros_; + /** + * optional int64 scheduled_micros = 9; + */ + 
public long getScheduledMicros() { + return scheduledMicros_; + } + + public static final int THREAD_ID_FIELD_NUMBER = 10; + private int threadId_; + /** + * optional uint32 thread_id = 10; + */ + public int getThreadId() { + return threadId_; + } + + public static final int REFERENCED_TENSOR_FIELD_NUMBER = 11; + private java.util.List referencedTensor_; + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public java.util.List getReferencedTensorList() { + return referencedTensor_; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public java.util.List + getReferencedTensorOrBuilderList() { + return referencedTensor_; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public int getReferencedTensorCount() { + return referencedTensor_.size(); + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescription getReferencedTensor(int index) { + return referencedTensor_.get(index); + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescriptionOrBuilder getReferencedTensorOrBuilder( + int index) { + return referencedTensor_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNodeNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, nodeName_); + } + if (allStartMicros_ != 0L) { + output.writeInt64(2, allStartMicros_); + } + if (opStartRelMicros_ != 0L) { + output.writeInt64(3, opStartRelMicros_); + } + if (opEndRelMicros_ != 0L) { + 
output.writeInt64(4, opEndRelMicros_); + } + if (allEndRelMicros_ != 0L) { + output.writeInt64(5, allEndRelMicros_); + } + for (int i = 0; i < memory_.size(); i++) { + output.writeMessage(6, memory_.get(i)); + } + for (int i = 0; i < output_.size(); i++) { + output.writeMessage(7, output_.get(i)); + } + if (!getTimelineLabelBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 8, timelineLabel_); + } + if (scheduledMicros_ != 0L) { + output.writeInt64(9, scheduledMicros_); + } + if (threadId_ != 0) { + output.writeUInt32(10, threadId_); + } + for (int i = 0; i < referencedTensor_.size(); i++) { + output.writeMessage(11, referencedTensor_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNodeNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, nodeName_); + } + if (allStartMicros_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, allStartMicros_); + } + if (opStartRelMicros_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, opStartRelMicros_); + } + if (opEndRelMicros_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(4, opEndRelMicros_); + } + if (allEndRelMicros_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(5, allEndRelMicros_); + } + for (int i = 0; i < memory_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, memory_.get(i)); + } + for (int i = 0; i < output_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, output_.get(i)); + } + if (!getTimelineLabelBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, timelineLabel_); + } + if (scheduledMicros_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(9, scheduledMicros_); + } + if (threadId_ != 0) { + size += 
com.google.protobuf.CodedOutputStream + .computeUInt32Size(10, threadId_); + } + for (int i = 0; i < referencedTensor_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(11, referencedTensor_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.NodeExecStats)) { + return super.equals(obj); + } + org.tensorflow.framework.NodeExecStats other = (org.tensorflow.framework.NodeExecStats) obj; + + boolean result = true; + result = result && getNodeName() + .equals(other.getNodeName()); + result = result && (getAllStartMicros() + == other.getAllStartMicros()); + result = result && (getOpStartRelMicros() + == other.getOpStartRelMicros()); + result = result && (getOpEndRelMicros() + == other.getOpEndRelMicros()); + result = result && (getAllEndRelMicros() + == other.getAllEndRelMicros()); + result = result && getMemoryList() + .equals(other.getMemoryList()); + result = result && getOutputList() + .equals(other.getOutputList()); + result = result && getTimelineLabel() + .equals(other.getTimelineLabel()); + result = result && (getScheduledMicros() + == other.getScheduledMicros()); + result = result && (getThreadId() + == other.getThreadId()); + result = result && getReferencedTensorList() + .equals(other.getReferencedTensorList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NODE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getNodeName().hashCode(); + hash = (37 * hash) + ALL_START_MICROS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllStartMicros()); + hash = (37 * hash) + OP_START_REL_MICROS_FIELD_NUMBER; + 
hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getOpStartRelMicros()); + hash = (37 * hash) + OP_END_REL_MICROS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getOpEndRelMicros()); + hash = (37 * hash) + ALL_END_REL_MICROS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getAllEndRelMicros()); + if (getMemoryCount() > 0) { + hash = (37 * hash) + MEMORY_FIELD_NUMBER; + hash = (53 * hash) + getMemoryList().hashCode(); + } + if (getOutputCount() > 0) { + hash = (37 * hash) + OUTPUT_FIELD_NUMBER; + hash = (53 * hash) + getOutputList().hashCode(); + } + hash = (37 * hash) + TIMELINE_LABEL_FIELD_NUMBER; + hash = (53 * hash) + getTimelineLabel().hashCode(); + hash = (37 * hash) + SCHEDULED_MICROS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getScheduledMicros()); + hash = (37 * hash) + THREAD_ID_FIELD_NUMBER; + hash = (53 * hash) + getThreadId(); + if (getReferencedTensorCount() > 0) { + hash = (37 * hash) + REFERENCED_TENSOR_FIELD_NUMBER; + hash = (53 * hash) + getReferencedTensorList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.NodeExecStats parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NodeExecStats parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NodeExecStats parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NodeExecStats parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NodeExecStats parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeExecStats parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NodeExecStats parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeExecStats parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NodeExecStats parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeExecStats parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.tensorflow.framework.NodeExecStats prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Time/size stats recorded for a single execution of a graph node.
+   * 
+ * + * Protobuf type {@code tensorflow.NodeExecStats} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.NodeExecStats) + org.tensorflow.framework.NodeExecStatsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeExecStats_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeExecStats_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NodeExecStats.class, org.tensorflow.framework.NodeExecStats.Builder.class); + } + + // Construct using org.tensorflow.framework.NodeExecStats.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getMemoryFieldBuilder(); + getOutputFieldBuilder(); + getReferencedTensorFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + nodeName_ = ""; + + allStartMicros_ = 0L; + + opStartRelMicros_ = 0L; + + opEndRelMicros_ = 0L; + + allEndRelMicros_ = 0L; + + if (memoryBuilder_ == null) { + memory_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + } else { + memoryBuilder_.clear(); + } + if (outputBuilder_ == null) { + output_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + } else { + outputBuilder_.clear(); + } + timelineLabel_ = ""; + + scheduledMicros_ = 0L; + + threadId_ = 0; + + if (referencedTensorBuilder_ == null) { + referencedTensor_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000400); + } else { + referencedTensorBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeExecStats_descriptor; + } + + public org.tensorflow.framework.NodeExecStats getDefaultInstanceForType() { + return org.tensorflow.framework.NodeExecStats.getDefaultInstance(); + } + + public org.tensorflow.framework.NodeExecStats build() { + org.tensorflow.framework.NodeExecStats result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.NodeExecStats buildPartial() { + org.tensorflow.framework.NodeExecStats result = new org.tensorflow.framework.NodeExecStats(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.nodeName_ = nodeName_; + result.allStartMicros_ = allStartMicros_; + result.opStartRelMicros_ = opStartRelMicros_; + result.opEndRelMicros_ = opEndRelMicros_; + result.allEndRelMicros_ = allEndRelMicros_; + if (memoryBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { + memory_ = java.util.Collections.unmodifiableList(memory_); + bitField0_ = (bitField0_ & ~0x00000020); + } + result.memory_ = memory_; + } else { + result.memory_ = memoryBuilder_.build(); + } + if (outputBuilder_ == null) { + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output_ = java.util.Collections.unmodifiableList(output_); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.output_ = output_; + } else { + result.output_ = outputBuilder_.build(); + } + result.timelineLabel_ = timelineLabel_; + result.scheduledMicros_ = scheduledMicros_; + result.threadId_ = threadId_; + if (referencedTensorBuilder_ == null) { + if (((bitField0_ & 0x00000400) == 0x00000400)) { + referencedTensor_ = 
java.util.Collections.unmodifiableList(referencedTensor_); + bitField0_ = (bitField0_ & ~0x00000400); + } + result.referencedTensor_ = referencedTensor_; + } else { + result.referencedTensor_ = referencedTensorBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.NodeExecStats) { + return mergeFrom((org.tensorflow.framework.NodeExecStats)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.NodeExecStats other) { + if (other == org.tensorflow.framework.NodeExecStats.getDefaultInstance()) return this; + if (!other.getNodeName().isEmpty()) { + nodeName_ = other.nodeName_; + onChanged(); + } + if (other.getAllStartMicros() != 0L) { + setAllStartMicros(other.getAllStartMicros()); + } + if (other.getOpStartRelMicros() != 0L) { + setOpStartRelMicros(other.getOpStartRelMicros()); + } + if (other.getOpEndRelMicros() != 0L) { + setOpEndRelMicros(other.getOpEndRelMicros()); + } + if (other.getAllEndRelMicros() != 0L) { + 
setAllEndRelMicros(other.getAllEndRelMicros()); + } + if (memoryBuilder_ == null) { + if (!other.memory_.isEmpty()) { + if (memory_.isEmpty()) { + memory_ = other.memory_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureMemoryIsMutable(); + memory_.addAll(other.memory_); + } + onChanged(); + } + } else { + if (!other.memory_.isEmpty()) { + if (memoryBuilder_.isEmpty()) { + memoryBuilder_.dispose(); + memoryBuilder_ = null; + memory_ = other.memory_; + bitField0_ = (bitField0_ & ~0x00000020); + memoryBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getMemoryFieldBuilder() : null; + } else { + memoryBuilder_.addAllMessages(other.memory_); + } + } + } + if (outputBuilder_ == null) { + if (!other.output_.isEmpty()) { + if (output_.isEmpty()) { + output_ = other.output_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureOutputIsMutable(); + output_.addAll(other.output_); + } + onChanged(); + } + } else { + if (!other.output_.isEmpty()) { + if (outputBuilder_.isEmpty()) { + outputBuilder_.dispose(); + outputBuilder_ = null; + output_ = other.output_; + bitField0_ = (bitField0_ & ~0x00000040); + outputBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getOutputFieldBuilder() : null; + } else { + outputBuilder_.addAllMessages(other.output_); + } + } + } + if (!other.getTimelineLabel().isEmpty()) { + timelineLabel_ = other.timelineLabel_; + onChanged(); + } + if (other.getScheduledMicros() != 0L) { + setScheduledMicros(other.getScheduledMicros()); + } + if (other.getThreadId() != 0) { + setThreadId(other.getThreadId()); + } + if (referencedTensorBuilder_ == null) { + if (!other.referencedTensor_.isEmpty()) { + if (referencedTensor_.isEmpty()) { + referencedTensor_ = other.referencedTensor_; + bitField0_ = (bitField0_ & ~0x00000400); + } else { + ensureReferencedTensorIsMutable(); + referencedTensor_.addAll(other.referencedTensor_); + } + onChanged(); + } + } else { + if (!other.referencedTensor_.isEmpty()) { + if (referencedTensorBuilder_.isEmpty()) { + referencedTensorBuilder_.dispose(); + referencedTensorBuilder_ = null; + referencedTensor_ = other.referencedTensor_; + bitField0_ = (bitField0_ & ~0x00000400); + referencedTensorBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getReferencedTensorFieldBuilder() : null; + } else { + referencedTensorBuilder_.addAllMessages(other.referencedTensor_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.NodeExecStats parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.NodeExecStats) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object nodeName_ = ""; + /** + *
+     * TODO(tucker): Use some more compact form of node identity than
+     * the full string name.  Either all processes should agree on a
+     * global id (cost_id?) for each node, or we should use a hash of
+     * the name.
+     * 
+ * + * optional string node_name = 1; + */ + public java.lang.String getNodeName() { + java.lang.Object ref = nodeName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nodeName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * TODO(tucker): Use some more compact form of node identity than
+     * the full string name.  Either all processes should agree on a
+     * global id (cost_id?) for each node, or we should use a hash of
+     * the name.
+     * 
+ * + * optional string node_name = 1; + */ + public com.google.protobuf.ByteString + getNodeNameBytes() { + java.lang.Object ref = nodeName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + nodeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * TODO(tucker): Use some more compact form of node identity than
+     * the full string name.  Either all processes should agree on a
+     * global id (cost_id?) for each node, or we should use a hash of
+     * the name.
+     * 
+ * + * optional string node_name = 1; + */ + public Builder setNodeName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + nodeName_ = value; + onChanged(); + return this; + } + /** + *
+     * TODO(tucker): Use some more compact form of node identity than
+     * the full string name.  Either all processes should agree on a
+     * global id (cost_id?) for each node, or we should use a hash of
+     * the name.
+     * 
+ * + * optional string node_name = 1; + */ + public Builder clearNodeName() { + + nodeName_ = getDefaultInstance().getNodeName(); + onChanged(); + return this; + } + /** + *
+     * TODO(tucker): Use some more compact form of node identity than
+     * the full string name.  Either all processes should agree on a
+     * global id (cost_id?) for each node, or we should use a hash of
+     * the name.
+     * 
+ * + * optional string node_name = 1; + */ + public Builder setNodeNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + nodeName_ = value; + onChanged(); + return this; + } + + private long allStartMicros_ ; + /** + * optional int64 all_start_micros = 2; + */ + public long getAllStartMicros() { + return allStartMicros_; + } + /** + * optional int64 all_start_micros = 2; + */ + public Builder setAllStartMicros(long value) { + + allStartMicros_ = value; + onChanged(); + return this; + } + /** + * optional int64 all_start_micros = 2; + */ + public Builder clearAllStartMicros() { + + allStartMicros_ = 0L; + onChanged(); + return this; + } + + private long opStartRelMicros_ ; + /** + * optional int64 op_start_rel_micros = 3; + */ + public long getOpStartRelMicros() { + return opStartRelMicros_; + } + /** + * optional int64 op_start_rel_micros = 3; + */ + public Builder setOpStartRelMicros(long value) { + + opStartRelMicros_ = value; + onChanged(); + return this; + } + /** + * optional int64 op_start_rel_micros = 3; + */ + public Builder clearOpStartRelMicros() { + + opStartRelMicros_ = 0L; + onChanged(); + return this; + } + + private long opEndRelMicros_ ; + /** + * optional int64 op_end_rel_micros = 4; + */ + public long getOpEndRelMicros() { + return opEndRelMicros_; + } + /** + * optional int64 op_end_rel_micros = 4; + */ + public Builder setOpEndRelMicros(long value) { + + opEndRelMicros_ = value; + onChanged(); + return this; + } + /** + * optional int64 op_end_rel_micros = 4; + */ + public Builder clearOpEndRelMicros() { + + opEndRelMicros_ = 0L; + onChanged(); + return this; + } + + private long allEndRelMicros_ ; + /** + * optional int64 all_end_rel_micros = 5; + */ + public long getAllEndRelMicros() { + return allEndRelMicros_; + } + /** + * optional int64 all_end_rel_micros = 5; + */ + public Builder setAllEndRelMicros(long value) { + + allEndRelMicros_ = 
value; + onChanged(); + return this; + } + /** + * optional int64 all_end_rel_micros = 5; + */ + public Builder clearAllEndRelMicros() { + + allEndRelMicros_ = 0L; + onChanged(); + return this; + } + + private java.util.List memory_ = + java.util.Collections.emptyList(); + private void ensureMemoryIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + memory_ = new java.util.ArrayList(memory_); + bitField0_ |= 0x00000020; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.AllocatorMemoryUsed, org.tensorflow.framework.AllocatorMemoryUsed.Builder, org.tensorflow.framework.AllocatorMemoryUsedOrBuilder> memoryBuilder_; + + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public java.util.List getMemoryList() { + if (memoryBuilder_ == null) { + return java.util.Collections.unmodifiableList(memory_); + } else { + return memoryBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public int getMemoryCount() { + if (memoryBuilder_ == null) { + return memory_.size(); + } else { + return memoryBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public org.tensorflow.framework.AllocatorMemoryUsed getMemory(int index) { + if (memoryBuilder_ == null) { + return memory_.get(index); + } else { + return memoryBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder setMemory( + int index, org.tensorflow.framework.AllocatorMemoryUsed value) { + if (memoryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMemoryIsMutable(); + memory_.set(index, value); + onChanged(); + } else { + memoryBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder setMemory( + int index, org.tensorflow.framework.AllocatorMemoryUsed.Builder 
builderForValue) { + if (memoryBuilder_ == null) { + ensureMemoryIsMutable(); + memory_.set(index, builderForValue.build()); + onChanged(); + } else { + memoryBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder addMemory(org.tensorflow.framework.AllocatorMemoryUsed value) { + if (memoryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMemoryIsMutable(); + memory_.add(value); + onChanged(); + } else { + memoryBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder addMemory( + int index, org.tensorflow.framework.AllocatorMemoryUsed value) { + if (memoryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMemoryIsMutable(); + memory_.add(index, value); + onChanged(); + } else { + memoryBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder addMemory( + org.tensorflow.framework.AllocatorMemoryUsed.Builder builderForValue) { + if (memoryBuilder_ == null) { + ensureMemoryIsMutable(); + memory_.add(builderForValue.build()); + onChanged(); + } else { + memoryBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder addMemory( + int index, org.tensorflow.framework.AllocatorMemoryUsed.Builder builderForValue) { + if (memoryBuilder_ == null) { + ensureMemoryIsMutable(); + memory_.add(index, builderForValue.build()); + onChanged(); + } else { + memoryBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder addAllMemory( + java.lang.Iterable values) { + if (memoryBuilder_ == null) { + ensureMemoryIsMutable(); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, memory_); + onChanged(); + } else { + memoryBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder clearMemory() { + if (memoryBuilder_ == null) { + memory_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + memoryBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public Builder removeMemory(int index) { + if (memoryBuilder_ == null) { + ensureMemoryIsMutable(); + memory_.remove(index); + onChanged(); + } else { + memoryBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public org.tensorflow.framework.AllocatorMemoryUsed.Builder getMemoryBuilder( + int index) { + return getMemoryFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public org.tensorflow.framework.AllocatorMemoryUsedOrBuilder getMemoryOrBuilder( + int index) { + if (memoryBuilder_ == null) { + return memory_.get(index); } else { + return memoryBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public java.util.List + getMemoryOrBuilderList() { + if (memoryBuilder_ != null) { + return memoryBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(memory_); + } + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public org.tensorflow.framework.AllocatorMemoryUsed.Builder addMemoryBuilder() { + return getMemoryFieldBuilder().addBuilder( + org.tensorflow.framework.AllocatorMemoryUsed.getDefaultInstance()); + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public org.tensorflow.framework.AllocatorMemoryUsed.Builder addMemoryBuilder( + int index) { + return 
getMemoryFieldBuilder().addBuilder( + index, org.tensorflow.framework.AllocatorMemoryUsed.getDefaultInstance()); + } + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + public java.util.List + getMemoryBuilderList() { + return getMemoryFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.AllocatorMemoryUsed, org.tensorflow.framework.AllocatorMemoryUsed.Builder, org.tensorflow.framework.AllocatorMemoryUsedOrBuilder> + getMemoryFieldBuilder() { + if (memoryBuilder_ == null) { + memoryBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.AllocatorMemoryUsed, org.tensorflow.framework.AllocatorMemoryUsed.Builder, org.tensorflow.framework.AllocatorMemoryUsedOrBuilder>( + memory_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + memory_ = null; + } + return memoryBuilder_; + } + + private java.util.List output_ = + java.util.Collections.emptyList(); + private void ensureOutputIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + output_ = new java.util.ArrayList(output_); + bitField0_ |= 0x00000040; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeOutput, org.tensorflow.framework.NodeOutput.Builder, org.tensorflow.framework.NodeOutputOrBuilder> outputBuilder_; + + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public java.util.List getOutputList() { + if (outputBuilder_ == null) { + return java.util.Collections.unmodifiableList(output_); + } else { + return outputBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public int getOutputCount() { + if (outputBuilder_ == null) { + return output_.size(); + } else { + return outputBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutput getOutput(int index) { + if (outputBuilder_ == null) 
{ + return output_.get(index); + } else { + return outputBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder setOutput( + int index, org.tensorflow.framework.NodeOutput value) { + if (outputBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputIsMutable(); + output_.set(index, value); + onChanged(); + } else { + outputBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder setOutput( + int index, org.tensorflow.framework.NodeOutput.Builder builderForValue) { + if (outputBuilder_ == null) { + ensureOutputIsMutable(); + output_.set(index, builderForValue.build()); + onChanged(); + } else { + outputBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder addOutput(org.tensorflow.framework.NodeOutput value) { + if (outputBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputIsMutable(); + output_.add(value); + onChanged(); + } else { + outputBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder addOutput( + int index, org.tensorflow.framework.NodeOutput value) { + if (outputBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputIsMutable(); + output_.add(index, value); + onChanged(); + } else { + outputBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder addOutput( + org.tensorflow.framework.NodeOutput.Builder builderForValue) { + if (outputBuilder_ == null) { + ensureOutputIsMutable(); + output_.add(builderForValue.build()); + onChanged(); + } else { + outputBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated 
.tensorflow.NodeOutput output = 7; + */ + public Builder addOutput( + int index, org.tensorflow.framework.NodeOutput.Builder builderForValue) { + if (outputBuilder_ == null) { + ensureOutputIsMutable(); + output_.add(index, builderForValue.build()); + onChanged(); + } else { + outputBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder addAllOutput( + java.lang.Iterable values) { + if (outputBuilder_ == null) { + ensureOutputIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, output_); + onChanged(); + } else { + outputBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder clearOutput() { + if (outputBuilder_ == null) { + output_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + } else { + outputBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public Builder removeOutput(int index) { + if (outputBuilder_ == null) { + ensureOutputIsMutable(); + output_.remove(index); + onChanged(); + } else { + outputBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutput.Builder getOutputBuilder( + int index) { + return getOutputFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutputOrBuilder getOutputOrBuilder( + int index) { + if (outputBuilder_ == null) { + return output_.get(index); } else { + return outputBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public java.util.List + getOutputOrBuilderList() { + if (outputBuilder_ != null) { + return outputBuilder_.getMessageOrBuilderList(); + } else { + return 
java.util.Collections.unmodifiableList(output_); + } + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutput.Builder addOutputBuilder() { + return getOutputFieldBuilder().addBuilder( + org.tensorflow.framework.NodeOutput.getDefaultInstance()); + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public org.tensorflow.framework.NodeOutput.Builder addOutputBuilder( + int index) { + return getOutputFieldBuilder().addBuilder( + index, org.tensorflow.framework.NodeOutput.getDefaultInstance()); + } + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + public java.util.List + getOutputBuilderList() { + return getOutputFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeOutput, org.tensorflow.framework.NodeOutput.Builder, org.tensorflow.framework.NodeOutputOrBuilder> + getOutputFieldBuilder() { + if (outputBuilder_ == null) { + outputBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.NodeOutput, org.tensorflow.framework.NodeOutput.Builder, org.tensorflow.framework.NodeOutputOrBuilder>( + output_, + ((bitField0_ & 0x00000040) == 0x00000040), + getParentForChildren(), + isClean()); + output_ = null; + } + return outputBuilder_; + } + + private java.lang.Object timelineLabel_ = ""; + /** + * optional string timeline_label = 8; + */ + public java.lang.String getTimelineLabel() { + java.lang.Object ref = timelineLabel_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + timelineLabel_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string timeline_label = 8; + */ + public com.google.protobuf.ByteString + getTimelineLabelBytes() { + java.lang.Object ref = timelineLabel_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + 
com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + timelineLabel_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string timeline_label = 8; + */ + public Builder setTimelineLabel( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + timelineLabel_ = value; + onChanged(); + return this; + } + /** + * optional string timeline_label = 8; + */ + public Builder clearTimelineLabel() { + + timelineLabel_ = getDefaultInstance().getTimelineLabel(); + onChanged(); + return this; + } + /** + * optional string timeline_label = 8; + */ + public Builder setTimelineLabelBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + timelineLabel_ = value; + onChanged(); + return this; + } + + private long scheduledMicros_ ; + /** + * optional int64 scheduled_micros = 9; + */ + public long getScheduledMicros() { + return scheduledMicros_; + } + /** + * optional int64 scheduled_micros = 9; + */ + public Builder setScheduledMicros(long value) { + + scheduledMicros_ = value; + onChanged(); + return this; + } + /** + * optional int64 scheduled_micros = 9; + */ + public Builder clearScheduledMicros() { + + scheduledMicros_ = 0L; + onChanged(); + return this; + } + + private int threadId_ ; + /** + * optional uint32 thread_id = 10; + */ + public int getThreadId() { + return threadId_; + } + /** + * optional uint32 thread_id = 10; + */ + public Builder setThreadId(int value) { + + threadId_ = value; + onChanged(); + return this; + } + /** + * optional uint32 thread_id = 10; + */ + public Builder clearThreadId() { + + threadId_ = 0; + onChanged(); + return this; + } + + private java.util.List referencedTensor_ = + java.util.Collections.emptyList(); + private void ensureReferencedTensorIsMutable() { + if (!((bitField0_ & 0x00000400) == 0x00000400)) { + referencedTensor_ = new 
java.util.ArrayList(referencedTensor_); + bitField0_ |= 0x00000400; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.AllocationDescription, org.tensorflow.framework.AllocationDescription.Builder, org.tensorflow.framework.AllocationDescriptionOrBuilder> referencedTensorBuilder_; + + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public java.util.List getReferencedTensorList() { + if (referencedTensorBuilder_ == null) { + return java.util.Collections.unmodifiableList(referencedTensor_); + } else { + return referencedTensorBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public int getReferencedTensorCount() { + if (referencedTensorBuilder_ == null) { + return referencedTensor_.size(); + } else { + return referencedTensorBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescription getReferencedTensor(int index) { + if (referencedTensorBuilder_ == null) { + return referencedTensor_.get(index); + } else { + return referencedTensorBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder setReferencedTensor( + int index, org.tensorflow.framework.AllocationDescription value) { + if (referencedTensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureReferencedTensorIsMutable(); + referencedTensor_.set(index, value); + onChanged(); + } else { + referencedTensorBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder setReferencedTensor( + int index, org.tensorflow.framework.AllocationDescription.Builder builderForValue) { + if (referencedTensorBuilder_ == null) { + ensureReferencedTensorIsMutable(); + 
referencedTensor_.set(index, builderForValue.build()); + onChanged(); + } else { + referencedTensorBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder addReferencedTensor(org.tensorflow.framework.AllocationDescription value) { + if (referencedTensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureReferencedTensorIsMutable(); + referencedTensor_.add(value); + onChanged(); + } else { + referencedTensorBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder addReferencedTensor( + int index, org.tensorflow.framework.AllocationDescription value) { + if (referencedTensorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureReferencedTensorIsMutable(); + referencedTensor_.add(index, value); + onChanged(); + } else { + referencedTensorBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder addReferencedTensor( + org.tensorflow.framework.AllocationDescription.Builder builderForValue) { + if (referencedTensorBuilder_ == null) { + ensureReferencedTensorIsMutable(); + referencedTensor_.add(builderForValue.build()); + onChanged(); + } else { + referencedTensorBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder addReferencedTensor( + int index, org.tensorflow.framework.AllocationDescription.Builder builderForValue) { + if (referencedTensorBuilder_ == null) { + ensureReferencedTensorIsMutable(); + referencedTensor_.add(index, builderForValue.build()); + onChanged(); + } else { + referencedTensorBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + 
* repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder addAllReferencedTensor( + java.lang.Iterable values) { + if (referencedTensorBuilder_ == null) { + ensureReferencedTensorIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, referencedTensor_); + onChanged(); + } else { + referencedTensorBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder clearReferencedTensor() { + if (referencedTensorBuilder_ == null) { + referencedTensor_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000400); + onChanged(); + } else { + referencedTensorBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public Builder removeReferencedTensor(int index) { + if (referencedTensorBuilder_ == null) { + ensureReferencedTensorIsMutable(); + referencedTensor_.remove(index); + onChanged(); + } else { + referencedTensorBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescription.Builder getReferencedTensorBuilder( + int index) { + return getReferencedTensorFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescriptionOrBuilder getReferencedTensorOrBuilder( + int index) { + if (referencedTensorBuilder_ == null) { + return referencedTensor_.get(index); } else { + return referencedTensorBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public java.util.List + getReferencedTensorOrBuilderList() { + if (referencedTensorBuilder_ != null) { + return referencedTensorBuilder_.getMessageOrBuilderList(); + } else { + return 
java.util.Collections.unmodifiableList(referencedTensor_); + } + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescription.Builder addReferencedTensorBuilder() { + return getReferencedTensorFieldBuilder().addBuilder( + org.tensorflow.framework.AllocationDescription.getDefaultInstance()); + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public org.tensorflow.framework.AllocationDescription.Builder addReferencedTensorBuilder( + int index) { + return getReferencedTensorFieldBuilder().addBuilder( + index, org.tensorflow.framework.AllocationDescription.getDefaultInstance()); + } + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + public java.util.List + getReferencedTensorBuilderList() { + return getReferencedTensorFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.AllocationDescription, org.tensorflow.framework.AllocationDescription.Builder, org.tensorflow.framework.AllocationDescriptionOrBuilder> + getReferencedTensorFieldBuilder() { + if (referencedTensorBuilder_ == null) { + referencedTensorBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.AllocationDescription, org.tensorflow.framework.AllocationDescription.Builder, org.tensorflow.framework.AllocationDescriptionOrBuilder>( + referencedTensor_, + ((bitField0_ & 0x00000400) == 0x00000400), + getParentForChildren(), + isClean()); + referencedTensor_ = null; + } + return referencedTensorBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.NodeExecStats) + } + + // 
@@protoc_insertion_point(class_scope:tensorflow.NodeExecStats) + private static final org.tensorflow.framework.NodeExecStats DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.NodeExecStats(); + } + + public static org.tensorflow.framework.NodeExecStats getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NodeExecStats parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NodeExecStats(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.NodeExecStats getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStatsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStatsOrBuilder.java new file mode 100644 index 0000000000000..539810898ac1e --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeExecStatsOrBuilder.java @@ -0,0 +1,145 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +public interface NodeExecStatsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.NodeExecStats) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * TODO(tucker): Use some more compact form of node identity than
+   * the full string name.  Either all processes should agree on a
+   * global id (cost_id?) for each node, or we should use a hash of
+   * the name.
+   * 
+ * + * optional string node_name = 1; + */ + java.lang.String getNodeName(); + /** + *
+   * TODO(tucker): Use some more compact form of node identity than
+   * the full string name.  Either all processes should agree on a
+   * global id (cost_id?) for each node, or we should use a hash of
+   * the name.
+   * 
+ * + * optional string node_name = 1; + */ + com.google.protobuf.ByteString + getNodeNameBytes(); + + /** + * optional int64 all_start_micros = 2; + */ + long getAllStartMicros(); + + /** + * optional int64 op_start_rel_micros = 3; + */ + long getOpStartRelMicros(); + + /** + * optional int64 op_end_rel_micros = 4; + */ + long getOpEndRelMicros(); + + /** + * optional int64 all_end_rel_micros = 5; + */ + long getAllEndRelMicros(); + + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + java.util.List + getMemoryList(); + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + org.tensorflow.framework.AllocatorMemoryUsed getMemory(int index); + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + int getMemoryCount(); + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + java.util.List + getMemoryOrBuilderList(); + /** + * repeated .tensorflow.AllocatorMemoryUsed memory = 6; + */ + org.tensorflow.framework.AllocatorMemoryUsedOrBuilder getMemoryOrBuilder( + int index); + + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + java.util.List + getOutputList(); + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + org.tensorflow.framework.NodeOutput getOutput(int index); + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + int getOutputCount(); + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + java.util.List + getOutputOrBuilderList(); + /** + * repeated .tensorflow.NodeOutput output = 7; + */ + org.tensorflow.framework.NodeOutputOrBuilder getOutputOrBuilder( + int index); + + /** + * optional string timeline_label = 8; + */ + java.lang.String getTimelineLabel(); + /** + * optional string timeline_label = 8; + */ + com.google.protobuf.ByteString + getTimelineLabelBytes(); + + /** + * optional int64 scheduled_micros = 9; + */ + long getScheduledMicros(); + + /** + * optional uint32 thread_id = 10; + */ + int getThreadId(); + + /** + * repeated .tensorflow.AllocationDescription 
referenced_tensor = 11; + */ + java.util.List + getReferencedTensorList(); + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + org.tensorflow.framework.AllocationDescription getReferencedTensor(int index); + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + int getReferencedTensorCount(); + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + java.util.List + getReferencedTensorOrBuilderList(); + /** + * repeated .tensorflow.AllocationDescription referenced_tensor = 11; + */ + org.tensorflow.framework.AllocationDescriptionOrBuilder getReferencedTensorOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutput.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutput.java new file mode 100644 index 0000000000000..ace88c2ccff3c --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutput.java @@ -0,0 +1,614 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +/** + *
+ * Output sizes recorded for a single execution of a graph node.
+ * 
+ * + * Protobuf type {@code tensorflow.NodeOutput} + */ +public final class NodeOutput extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.NodeOutput) + NodeOutputOrBuilder { + // Use NodeOutput.newBuilder() to construct. + private NodeOutput(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private NodeOutput() { + slot_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private NodeOutput( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + slot_ = input.readInt32(); + break; + } + case 26: { + org.tensorflow.framework.TensorDescription.Builder subBuilder = null; + if (tensorDescription_ != null) { + subBuilder = tensorDescription_.toBuilder(); + } + tensorDescription_ = input.readMessage(org.tensorflow.framework.TensorDescription.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tensorDescription_); + tensorDescription_ = subBuilder.buildPartial(); + } + + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeOutput_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeOutput_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NodeOutput.class, org.tensorflow.framework.NodeOutput.Builder.class); + } + + public static final int SLOT_FIELD_NUMBER = 1; + private int slot_; + /** + * optional int32 slot = 1; + */ + public int getSlot() { + return slot_; + } + + public static final int TENSOR_DESCRIPTION_FIELD_NUMBER = 3; + private org.tensorflow.framework.TensorDescription tensorDescription_; + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public boolean hasTensorDescription() { + return tensorDescription_ != null; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public org.tensorflow.framework.TensorDescription getTensorDescription() { + return tensorDescription_ == null ? 
org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensorDescription_; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public org.tensorflow.framework.TensorDescriptionOrBuilder getTensorDescriptionOrBuilder() { + return getTensorDescription(); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (slot_ != 0) { + output.writeInt32(1, slot_); + } + if (tensorDescription_ != null) { + output.writeMessage(3, getTensorDescription()); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (slot_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, slot_); + } + if (tensorDescription_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getTensorDescription()); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.NodeOutput)) { + return super.equals(obj); + } + org.tensorflow.framework.NodeOutput other = (org.tensorflow.framework.NodeOutput) obj; + + boolean result = true; + result = result && (getSlot() + == other.getSlot()); + result = result && (hasTensorDescription() == other.hasTensorDescription()); + if (hasTensorDescription()) { + result = result && getTensorDescription() + .equals(other.getTensorDescription()); + } + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = 
(19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + SLOT_FIELD_NUMBER; + hash = (53 * hash) + getSlot(); + if (hasTensorDescription()) { + hash = (37 * hash) + TENSOR_DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getTensorDescription().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.NodeOutput parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NodeOutput parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NodeOutput parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.NodeOutput parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.NodeOutput parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeOutput parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NodeOutput parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeOutput parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.NodeOutput parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.NodeOutput parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.NodeOutput prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Output sizes recorded for a single execution of a graph node.
+   * 
+ * + * Protobuf type {@code tensorflow.NodeOutput} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.NodeOutput) + org.tensorflow.framework.NodeOutputOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeOutput_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeOutput_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.NodeOutput.class, org.tensorflow.framework.NodeOutput.Builder.class); + } + + // Construct using org.tensorflow.framework.NodeOutput.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + slot_ = 0; + + if (tensorDescriptionBuilder_ == null) { + tensorDescription_ = null; + } else { + tensorDescription_ = null; + tensorDescriptionBuilder_ = null; + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_NodeOutput_descriptor; + } + + public org.tensorflow.framework.NodeOutput getDefaultInstanceForType() { + return org.tensorflow.framework.NodeOutput.getDefaultInstance(); + } + + public org.tensorflow.framework.NodeOutput build() { + org.tensorflow.framework.NodeOutput result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.NodeOutput buildPartial() { + org.tensorflow.framework.NodeOutput result = new org.tensorflow.framework.NodeOutput(this); + result.slot_ = slot_; + if (tensorDescriptionBuilder_ == null) { + result.tensorDescription_ = tensorDescription_; + } else { + result.tensorDescription_ = tensorDescriptionBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.NodeOutput) { + return mergeFrom((org.tensorflow.framework.NodeOutput)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.NodeOutput other) { + if (other == org.tensorflow.framework.NodeOutput.getDefaultInstance()) return this; + if (other.getSlot() != 0) { + setSlot(other.getSlot()); + } + if (other.hasTensorDescription()) { + mergeTensorDescription(other.getTensorDescription()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder 
mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.NodeOutput parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.NodeOutput) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int slot_ ; + /** + * optional int32 slot = 1; + */ + public int getSlot() { + return slot_; + } + /** + * optional int32 slot = 1; + */ + public Builder setSlot(int value) { + + slot_ = value; + onChanged(); + return this; + } + /** + * optional int32 slot = 1; + */ + public Builder clearSlot() { + + slot_ = 0; + onChanged(); + return this; + } + + private org.tensorflow.framework.TensorDescription tensorDescription_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder> tensorDescriptionBuilder_; + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public boolean hasTensorDescription() { + return tensorDescriptionBuilder_ != null || tensorDescription_ != null; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public org.tensorflow.framework.TensorDescription getTensorDescription() { + if (tensorDescriptionBuilder_ == null) { + return tensorDescription_ == null ? 
org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensorDescription_; + } else { + return tensorDescriptionBuilder_.getMessage(); + } + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public Builder setTensorDescription(org.tensorflow.framework.TensorDescription value) { + if (tensorDescriptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tensorDescription_ = value; + onChanged(); + } else { + tensorDescriptionBuilder_.setMessage(value); + } + + return this; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public Builder setTensorDescription( + org.tensorflow.framework.TensorDescription.Builder builderForValue) { + if (tensorDescriptionBuilder_ == null) { + tensorDescription_ = builderForValue.build(); + onChanged(); + } else { + tensorDescriptionBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public Builder mergeTensorDescription(org.tensorflow.framework.TensorDescription value) { + if (tensorDescriptionBuilder_ == null) { + if (tensorDescription_ != null) { + tensorDescription_ = + org.tensorflow.framework.TensorDescription.newBuilder(tensorDescription_).mergeFrom(value).buildPartial(); + } else { + tensorDescription_ = value; + } + onChanged(); + } else { + tensorDescriptionBuilder_.mergeFrom(value); + } + + return this; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public Builder clearTensorDescription() { + if (tensorDescriptionBuilder_ == null) { + tensorDescription_ = null; + onChanged(); + } else { + tensorDescription_ = null; + tensorDescriptionBuilder_ = null; + } + + return this; + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public org.tensorflow.framework.TensorDescription.Builder getTensorDescriptionBuilder() { + + onChanged(); + return 
getTensorDescriptionFieldBuilder().getBuilder(); + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + public org.tensorflow.framework.TensorDescriptionOrBuilder getTensorDescriptionOrBuilder() { + if (tensorDescriptionBuilder_ != null) { + return tensorDescriptionBuilder_.getMessageOrBuilder(); + } else { + return tensorDescription_ == null ? + org.tensorflow.framework.TensorDescription.getDefaultInstance() : tensorDescription_; + } + } + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder> + getTensorDescriptionFieldBuilder() { + if (tensorDescriptionBuilder_ == null) { + tensorDescriptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.TensorDescription, org.tensorflow.framework.TensorDescription.Builder, org.tensorflow.framework.TensorDescriptionOrBuilder>( + getTensorDescription(), + getParentForChildren(), + isClean()); + tensorDescription_ = null; + } + return tensorDescriptionBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.NodeOutput) + } + + // @@protoc_insertion_point(class_scope:tensorflow.NodeOutput) + private static final org.tensorflow.framework.NodeOutput DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.NodeOutput(); + } + + public static org.tensorflow.framework.NodeOutput getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public NodeOutput parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new NodeOutput(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.NodeOutput getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutputOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutputOrBuilder.java new file mode 100644 index 0000000000000..38f2fae46525a --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeOutputOrBuilder.java @@ -0,0 +1,27 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +public interface NodeOutputOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.NodeOutput) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int32 slot = 1; + */ + int getSlot(); + + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + boolean hasTensorDescription(); + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + org.tensorflow.framework.TensorDescription getTensorDescription(); + /** + * optional .tensorflow.TensorDescription tensor_description = 3; + */ + org.tensorflow.framework.TensorDescriptionOrBuilder getTensorDescriptionOrBuilder(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeProto.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeProto.java new file mode 100644 index 0000000000000..77284db9410bf --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/NodeProto.java @@ -0,0 +1,75 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/node_def.proto + +package org.tensorflow.framework; + +public final class NodeProto { + private NodeProto() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_NodeDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_NodeDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_NodeDef_AttrEntry_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_NodeDef_AttrEntry_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n(tensorflow/core/framework/node_def.pro" + + "to\022\ntensorflow\032*tensorflow/core/framewor" + + "k/attr_value.proto\"\263\001\n\007NodeDef\022\014\n\004name\030\001" + + " \001(\t\022\n\n\002op\030\002 \001(\t\022\r\n\005input\030\003 \003(\t\022\016\n\006devic" + + "e\030\004 \001(\t\022+\n\004attr\030\005 \003(\0132\035.tensorflow.NodeD" + + "ef.AttrEntry\032B\n\tAttrEntry\022\013\n\003key\030\001 \001(\t\022$" + + "\n\005value\030\002 \001(\0132\025.tensorflow.AttrValue:\0028\001" + + "B*\n\030org.tensorflow.frameworkB\tNodeProtoP" + + "\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.AttrValueProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_NodeDef_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_NodeDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_NodeDef_descriptor, + new java.lang.String[] { "Name", "Op", "Input", "Device", "Attr", }); + internal_static_tensorflow_NodeDef_AttrEntry_descriptor = + internal_static_tensorflow_NodeDef_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_NodeDef_AttrEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_NodeDef_AttrEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + org.tensorflow.framework.AttrValueProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDef.java new file mode 100644 index 0000000000000..40aff4eb45605 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDef.java @@ -0,0 +1,5878 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +/** + *
+ * Defines an operation. A NodeDef in a GraphDef specifies an Op by
+ * using the "op" field which should match the name of a OpDef.
+ * 
+ * + * Protobuf type {@code tensorflow.OpDef} + */ +public final class OpDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.OpDef) + OpDefOrBuilder { + // Use OpDef.newBuilder() to construct. + private OpDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OpDef() { + name_ = ""; + inputArg_ = java.util.Collections.emptyList(); + outputArg_ = java.util.Collections.emptyList(); + attr_ = java.util.Collections.emptyList(); + summary_ = ""; + description_ = ""; + isCommutative_ = false; + isAggregate_ = false; + isStateful_ = false; + allowsUninitializedInput_ = false; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private OpDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + inputArg_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + inputArg_.add( + input.readMessage(org.tensorflow.framework.OpDef.ArgDef.parser(), extensionRegistry)); + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + outputArg_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + outputArg_.add( + input.readMessage(org.tensorflow.framework.OpDef.ArgDef.parser(), extensionRegistry)); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 
0x00000008)) { + attr_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + attr_.add( + input.readMessage(org.tensorflow.framework.OpDef.AttrDef.parser(), extensionRegistry)); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + summary_ = s; + break; + } + case 50: { + java.lang.String s = input.readStringRequireUtf8(); + + description_ = s; + break; + } + case 66: { + org.tensorflow.framework.OpDeprecation.Builder subBuilder = null; + if (deprecation_ != null) { + subBuilder = deprecation_.toBuilder(); + } + deprecation_ = input.readMessage(org.tensorflow.framework.OpDeprecation.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(deprecation_); + deprecation_ = subBuilder.buildPartial(); + } + + break; + } + case 128: { + + isAggregate_ = input.readBool(); + break; + } + case 136: { + + isStateful_ = input.readBool(); + break; + } + case 144: { + + isCommutative_ = input.readBool(); + break; + } + case 152: { + + allowsUninitializedInput_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + inputArg_ = java.util.Collections.unmodifiableList(inputArg_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + outputArg_ = java.util.Collections.unmodifiableList(outputArg_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + attr_ = java.util.Collections.unmodifiableList(attr_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDef.class, org.tensorflow.framework.OpDef.Builder.class); + } + + public interface ArgDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.OpDef.ArgDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+     * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * Human readable description.
+     * 
+ * + * optional string description = 2; + */ + java.lang.String getDescription(); + /** + *
+     * Human readable description.
+     * 
+ * + * optional string description = 2; + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + + /** + *
+     * Describes the type of one or more tensors that are accepted/produced
+     * by this input/output arg.  The only legal combinations are:
+     * * For a single tensor: either the "type" field is set or the
+     *   "type_attr" field is set to the name of an attr with type "type".
+     * * For a sequence of tensors with the same type: the "number_attr"
+     *   field will be set to the name of an attr with type "int", and
+     *   either the "type" or "type_attr" field will be set as for
+     *   single tensors.
+     * * For a sequence of tensors, the "type_list_attr" field will be set
+     *   to the name of an attr with type "list(type)".
+     * 
+ * + * optional .tensorflow.DataType type = 3; + */ + int getTypeValue(); + /** + *
+     * Describes the type of one or more tensors that are accepted/produced
+     * by this input/output arg.  The only legal combinations are:
+     * * For a single tensor: either the "type" field is set or the
+     *   "type_attr" field is set to the name of an attr with type "type".
+     * * For a sequence of tensors with the same type: the "number_attr"
+     *   field will be set to the name of an attr with type "int", and
+     *   either the "type" or "type_attr" field will be set as for
+     *   single tensors.
+     * * For a sequence of tensors, the "type_list_attr" field will be set
+     *   to the name of an attr with type "list(type)".
+     * 
+ * + * optional .tensorflow.DataType type = 3; + */ + org.tensorflow.framework.DataType getType(); + + /** + *
+     * if specified, attr must have type "type"
+     * 
+ * + * optional string type_attr = 4; + */ + java.lang.String getTypeAttr(); + /** + *
+     * if specified, attr must have type "type"
+     * 
+ * + * optional string type_attr = 4; + */ + com.google.protobuf.ByteString + getTypeAttrBytes(); + + /** + *
+     * if specified, attr must have type "int"
+     * 
+ * + * optional string number_attr = 5; + */ + java.lang.String getNumberAttr(); + /** + *
+     * if specified, attr must have type "int"
+     * 
+ * + * optional string number_attr = 5; + */ + com.google.protobuf.ByteString + getNumberAttrBytes(); + + /** + *
+     * If specified, attr must have type "list(type)", and none of
+     * type, type_attr, and number_attr may be specified.
+     * 
+ * + * optional string type_list_attr = 6; + */ + java.lang.String getTypeListAttr(); + /** + *
+     * If specified, attr must have type "list(type)", and none of
+     * type, type_attr, and number_attr may be specified.
+     * 
+ * + * optional string type_list_attr = 6; + */ + com.google.protobuf.ByteString + getTypeListAttrBytes(); + + /** + *
+     * For inputs: if true, the inputs are required to be refs.
+     *   By default, inputs can be either refs or non-refs.
+     * For outputs: if true, outputs are refs, otherwise they are not.
+     * 
+ * + * optional bool is_ref = 16; + */ + boolean getIsRef(); + } + /** + *
+   * For describing inputs and outputs.
+   * 
+ * + * Protobuf type {@code tensorflow.OpDef.ArgDef} + */ + public static final class ArgDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.OpDef.ArgDef) + ArgDefOrBuilder { + // Use ArgDef.newBuilder() to construct. + private ArgDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ArgDef() { + name_ = ""; + description_ = ""; + type_ = 0; + typeAttr_ = ""; + numberAttr_ = ""; + typeListAttr_ = ""; + isRef_ = false; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private ArgDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + description_ = s; + break; + } + case 24: { + int rawValue = input.readEnum(); + + type_ = rawValue; + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + typeAttr_ = s; + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + numberAttr_ = s; + break; + } + case 50: { + java.lang.String s = input.readStringRequireUtf8(); + + typeListAttr_ = s; + break; + } + case 128: { + + isRef_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + 
e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_ArgDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_ArgDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDef.ArgDef.class, org.tensorflow.framework.OpDef.ArgDef.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+     * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DESCRIPTION_FIELD_NUMBER = 2; + private volatile java.lang.Object description_; + /** + *
+     * Human readable description.
+     * 
+ * + * optional string description = 2; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + *
+     * Human readable description.
+     * 
+ * + * optional string description = 2; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TYPE_FIELD_NUMBER = 3; + private int type_; + /** + *
+     * Describes the type of one or more tensors that are accepted/produced
+     * by this input/output arg.  The only legal combinations are:
+     * * For a single tensor: either the "type" field is set or the
+     *   "type_attr" field is set to the name of an attr with type "type".
+     * * For a sequence of tensors with the same type: the "number_attr"
+     *   field will be set to the name of an attr with type "int", and
+     *   either the "type" or "type_attr" field will be set as for
+     *   single tensors.
+     * * For a sequence of tensors, the "type_list_attr" field will be set
+     *   to the name of an attr with type "list(type)".
+     * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public int getTypeValue() { + return type_; + } + /** + *
+     * Describes the type of one or more tensors that are accepted/produced
+     * by this input/output arg.  The only legal combinations are:
+     * * For a single tensor: either the "type" field is set or the
+     *   "type_attr" field is set to the name of an attr with type "type".
+     * * For a sequence of tensors with the same type: the "number_attr"
+     *   field will be set to the name of an attr with type "int", and
+     *   either the "type" or "type_attr" field will be set as for
+     *   single tensors.
+     * * For a sequence of tensors, the "type_list_attr" field will be set
+     *   to the name of an attr with type "list(type)".
+     * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public org.tensorflow.framework.DataType getType() { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(type_); + return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + + public static final int TYPE_ATTR_FIELD_NUMBER = 4; + private volatile java.lang.Object typeAttr_; + /** + *
+     * if specified, attr must have type "type"
+     * 
+ * + * optional string type_attr = 4; + */ + public java.lang.String getTypeAttr() { + java.lang.Object ref = typeAttr_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + typeAttr_ = s; + return s; + } + } + /** + *
+     * if specified, attr must have type "type"
+     * 
+ * + * optional string type_attr = 4; + */ + public com.google.protobuf.ByteString + getTypeAttrBytes() { + java.lang.Object ref = typeAttr_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + typeAttr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NUMBER_ATTR_FIELD_NUMBER = 5; + private volatile java.lang.Object numberAttr_; + /** + *
+     * if specified, attr must have type "int"
+     * 
+ * + * optional string number_attr = 5; + */ + public java.lang.String getNumberAttr() { + java.lang.Object ref = numberAttr_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + numberAttr_ = s; + return s; + } + } + /** + *
+     * if specified, attr must have type "int"
+     * 
+ * + * optional string number_attr = 5; + */ + public com.google.protobuf.ByteString + getNumberAttrBytes() { + java.lang.Object ref = numberAttr_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + numberAttr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TYPE_LIST_ATTR_FIELD_NUMBER = 6; + private volatile java.lang.Object typeListAttr_; + /** + *
+     * If specified, attr must have type "list(type)", and none of
+     * type, type_attr, and number_attr may be specified.
+     * 
+ * + * optional string type_list_attr = 6; + */ + public java.lang.String getTypeListAttr() { + java.lang.Object ref = typeListAttr_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + typeListAttr_ = s; + return s; + } + } + /** + *
+     * If specified, attr must have type "list(type)", and none of
+     * type, type_attr, and number_attr may be specified.
+     * 
+ * + * optional string type_list_attr = 6; + */ + public com.google.protobuf.ByteString + getTypeListAttrBytes() { + java.lang.Object ref = typeListAttr_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + typeListAttr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int IS_REF_FIELD_NUMBER = 16; + private boolean isRef_; + /** + *
+     * For inputs: if true, the inputs are required to be refs.
+     *   By default, inputs can be either refs or non-refs.
+     * For outputs: if true, outputs are refs, otherwise they are not.
+     * 
+ * + * optional bool is_ref = 16; + */ + public boolean getIsRef() { + return isRef_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getDescriptionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, description_); + } + if (type_ != org.tensorflow.framework.DataType.DT_INVALID.getNumber()) { + output.writeEnum(3, type_); + } + if (!getTypeAttrBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, typeAttr_); + } + if (!getNumberAttrBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, numberAttr_); + } + if (!getTypeListAttrBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, typeListAttr_); + } + if (isRef_ != false) { + output.writeBool(16, isRef_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (!getDescriptionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, description_); + } + if (type_ != org.tensorflow.framework.DataType.DT_INVALID.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(3, type_); + } + if (!getTypeAttrBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, typeAttr_); + } + if (!getNumberAttrBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, numberAttr_); + } 
+ if (!getTypeListAttrBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, typeListAttr_); + } + if (isRef_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(16, isRef_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.OpDef.ArgDef)) { + return super.equals(obj); + } + org.tensorflow.framework.OpDef.ArgDef other = (org.tensorflow.framework.OpDef.ArgDef) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getDescription() + .equals(other.getDescription()); + result = result && type_ == other.type_; + result = result && getTypeAttr() + .equals(other.getTypeAttr()); + result = result && getNumberAttr() + .equals(other.getNumberAttr()); + result = result && getTypeListAttr() + .equals(other.getTypeListAttr()); + result = result && (getIsRef() + == other.getIsRef()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_; + hash = (37 * hash) + TYPE_ATTR_FIELD_NUMBER; + hash = (53 * hash) + getTypeAttr().hashCode(); + hash = (37 * hash) + NUMBER_ATTR_FIELD_NUMBER; + hash = (53 * hash) + getNumberAttr().hashCode(); + hash = (37 * hash) + TYPE_LIST_ATTR_FIELD_NUMBER; + hash = (53 * hash) + getTypeListAttr().hashCode(); + hash = (37 * hash) + IS_REF_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + 
getIsRef()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.OpDef.ArgDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.ArgDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef.ArgDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef.ArgDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.OpDef.ArgDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * For describing inputs and outputs.
+     * 
+ * + * Protobuf type {@code tensorflow.OpDef.ArgDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.OpDef.ArgDef) + org.tensorflow.framework.OpDef.ArgDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_ArgDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_ArgDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDef.ArgDef.class, org.tensorflow.framework.OpDef.ArgDef.Builder.class); + } + + // Construct using org.tensorflow.framework.OpDef.ArgDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + description_ = ""; + + type_ = 0; + + typeAttr_ = ""; + + numberAttr_ = ""; + + typeListAttr_ = ""; + + isRef_ = false; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_ArgDef_descriptor; + } + + public org.tensorflow.framework.OpDef.ArgDef getDefaultInstanceForType() { + return org.tensorflow.framework.OpDef.ArgDef.getDefaultInstance(); + } + + public org.tensorflow.framework.OpDef.ArgDef build() { + org.tensorflow.framework.OpDef.ArgDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + 
return result; + } + + public org.tensorflow.framework.OpDef.ArgDef buildPartial() { + org.tensorflow.framework.OpDef.ArgDef result = new org.tensorflow.framework.OpDef.ArgDef(this); + result.name_ = name_; + result.description_ = description_; + result.type_ = type_; + result.typeAttr_ = typeAttr_; + result.numberAttr_ = numberAttr_; + result.typeListAttr_ = typeListAttr_; + result.isRef_ = isRef_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.OpDef.ArgDef) { + return mergeFrom((org.tensorflow.framework.OpDef.ArgDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.OpDef.ArgDef other) { + if (other == org.tensorflow.framework.OpDef.ArgDef.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + onChanged(); + } + if (other.type_ != 0) { + setTypeValue(other.getTypeValue()); + } + if (!other.getTypeAttr().isEmpty()) { + 
typeAttr_ = other.typeAttr_; + onChanged(); + } + if (!other.getNumberAttr().isEmpty()) { + numberAttr_ = other.numberAttr_; + onChanged(); + } + if (!other.getTypeListAttr().isEmpty()) { + typeListAttr_ = other.typeListAttr_; + onChanged(); + } + if (other.getIsRef() != false) { + setIsRef(other.getIsRef()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.OpDef.ArgDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.OpDef.ArgDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+       * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+       * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+       * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+       * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+       * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+       * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+       * Name for the input/output.  Should match the regexp "[a-z][a-z0-9_]*".
+       * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object description_ = ""; + /** + *
+       * Human readable description.
+       * 
+ * + * optional string description = 2; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Human readable description.
+       * 
+ * + * optional string description = 2; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Human readable description.
+       * 
+ * + * optional string description = 2; + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + description_ = value; + onChanged(); + return this; + } + /** + *
+       * Human readable description.
+       * 
+ * + * optional string description = 2; + */ + public Builder clearDescription() { + + description_ = getDefaultInstance().getDescription(); + onChanged(); + return this; + } + /** + *
+       * Human readable description.
+       * 
+ * + * optional string description = 2; + */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + description_ = value; + onChanged(); + return this; + } + + private int type_ = 0; + /** + *
+       * Describes the type of one or more tensors that are accepted/produced
+       * by this input/output arg.  The only legal combinations are:
+       * * For a single tensor: either the "type" field is set or the
+       *   "type_attr" field is set to the name of an attr with type "type".
+       * * For a sequence of tensors with the same type: the "number_attr"
+       *   field will be set to the name of an attr with type "int", and
+       *   either the "type" or "type_attr" field will be set as for
+       *   single tensors.
+       * * For a sequence of tensors, the "type_list_attr" field will be set
+       *   to the name of an attr with type "list(type)".
+       * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public int getTypeValue() { + return type_; + } + /** + *
+       * Describes the type of one or more tensors that are accepted/produced
+       * by this input/output arg.  The only legal combinations are:
+       * * For a single tensor: either the "type" field is set or the
+       *   "type_attr" field is set to the name of an attr with type "type".
+       * * For a sequence of tensors with the same type: the "number_attr"
+       *   field will be set to the name of an attr with type "int", and
+       *   either the "type" or "type_attr" field will be set as for
+       *   single tensors.
+       * * For a sequence of tensors, the "type_list_attr" field will be set
+       *   to the name of an attr with type "list(type)".
+       * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public Builder setTypeValue(int value) { + type_ = value; + onChanged(); + return this; + } + /** + *
+       * Describes the type of one or more tensors that are accepted/produced
+       * by this input/output arg.  The only legal combinations are:
+       * * For a single tensor: either the "type" field is set or the
+       *   "type_attr" field is set to the name of an attr with type "type".
+       * * For a sequence of tensors with the same type: the "number_attr"
+       *   field will be set to the name of an attr with type "int", and
+       *   either the "type" or "type_attr" field will be set as for
+       *   single tensors.
+       * * For a sequence of tensors, the "type_list_attr" field will be set
+       *   to the name of an attr with type "list(type)".
+       * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public org.tensorflow.framework.DataType getType() { + org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(type_); + return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result; + } + /** + *
+       * Describes the type of one or more tensors that are accepted/produced
+       * by this input/output arg.  The only legal combinations are:
+       * * For a single tensor: either the "type" field is set or the
+       *   "type_attr" field is set to the name of an attr with type "type".
+       * * For a sequence of tensors with the same type: the "number_attr"
+       *   field will be set to the name of an attr with type "int", and
+       *   either the "type" or "type_attr" field will be set as for
+       *   single tensors.
+       * * For a sequence of tensors, the "type_list_attr" field will be set
+       *   to the name of an attr with type "list(type)".
+       * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public Builder setType(org.tensorflow.framework.DataType value) { + if (value == null) { + throw new NullPointerException(); + } + + type_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * Describes the type of one or more tensors that are accepted/produced
+       * by this input/output arg.  The only legal combinations are:
+       * * For a single tensor: either the "type" field is set or the
+       *   "type_attr" field is set to the name of an attr with type "type".
+       * * For a sequence of tensors with the same type: the "number_attr"
+       *   field will be set to the name of an attr with type "int", and
+       *   either the "type" or "type_attr" field will be set as for
+       *   single tensors.
+       * * For a sequence of tensors, the "type_list_attr" field will be set
+       *   to the name of an attr with type "list(type)".
+       * 
+ * + * optional .tensorflow.DataType type = 3; + */ + public Builder clearType() { + + type_ = 0; + onChanged(); + return this; + } + + private java.lang.Object typeAttr_ = ""; + /** + *
+       * if specified, attr must have type "type"
+       * 
+ * + * optional string type_attr = 4; + */ + public java.lang.String getTypeAttr() { + java.lang.Object ref = typeAttr_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + typeAttr_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * if specified, attr must have type "type"
+       * 
+ * + * optional string type_attr = 4; + */ + public com.google.protobuf.ByteString + getTypeAttrBytes() { + java.lang.Object ref = typeAttr_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + typeAttr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * if specified, attr must have type "type"
+       * 
+ * + * optional string type_attr = 4; + */ + public Builder setTypeAttr( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + typeAttr_ = value; + onChanged(); + return this; + } + /** + *
+       * if specified, attr must have type "type"
+       * 
+ * + * optional string type_attr = 4; + */ + public Builder clearTypeAttr() { + + typeAttr_ = getDefaultInstance().getTypeAttr(); + onChanged(); + return this; + } + /** + *
+       * if specified, attr must have type "type"
+       * 
+ * + * optional string type_attr = 4; + */ + public Builder setTypeAttrBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + typeAttr_ = value; + onChanged(); + return this; + } + + private java.lang.Object numberAttr_ = ""; + /** + *
+       * if specified, attr must have type "int"
+       * 
+ * + * optional string number_attr = 5; + */ + public java.lang.String getNumberAttr() { + java.lang.Object ref = numberAttr_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + numberAttr_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * if specified, attr must have type "int"
+       * 
+ * + * optional string number_attr = 5; + */ + public com.google.protobuf.ByteString + getNumberAttrBytes() { + java.lang.Object ref = numberAttr_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + numberAttr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * if specified, attr must have type "int"
+       * 
+ * + * optional string number_attr = 5; + */ + public Builder setNumberAttr( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + numberAttr_ = value; + onChanged(); + return this; + } + /** + *
+       * if specified, attr must have type "int"
+       * 
+ * + * optional string number_attr = 5; + */ + public Builder clearNumberAttr() { + + numberAttr_ = getDefaultInstance().getNumberAttr(); + onChanged(); + return this; + } + /** + *
+       * if specified, attr must have type "int"
+       * 
+ * + * optional string number_attr = 5; + */ + public Builder setNumberAttrBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + numberAttr_ = value; + onChanged(); + return this; + } + + private java.lang.Object typeListAttr_ = ""; + /** + *
+       * If specified, attr must have type "list(type)", and none of
+       * type, type_attr, and number_attr may be specified.
+       * 
+ * + * optional string type_list_attr = 6; + */ + public java.lang.String getTypeListAttr() { + java.lang.Object ref = typeListAttr_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + typeListAttr_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * If specified, attr must have type "list(type)", and none of
+       * type, type_attr, and number_attr may be specified.
+       * 
+ * + * optional string type_list_attr = 6; + */ + public com.google.protobuf.ByteString + getTypeListAttrBytes() { + java.lang.Object ref = typeListAttr_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + typeListAttr_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * If specified, attr must have type "list(type)", and none of
+       * type, type_attr, and number_attr may be specified.
+       * 
+ * + * optional string type_list_attr = 6; + */ + public Builder setTypeListAttr( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + typeListAttr_ = value; + onChanged(); + return this; + } + /** + *
+       * If specified, attr must have type "list(type)", and none of
+       * type, type_attr, and number_attr may be specified.
+       * 
+ * + * optional string type_list_attr = 6; + */ + public Builder clearTypeListAttr() { + + typeListAttr_ = getDefaultInstance().getTypeListAttr(); + onChanged(); + return this; + } + /** + *
+       * If specified, attr must have type "list(type)", and none of
+       * type, type_attr, and number_attr may be specified.
+       * 
+ * + * optional string type_list_attr = 6; + */ + public Builder setTypeListAttrBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + typeListAttr_ = value; + onChanged(); + return this; + } + + private boolean isRef_ ; + /** + *
+       * For inputs: if true, the inputs are required to be refs.
+       *   By default, inputs can be either refs or non-refs.
+       * For outputs: if true, outputs are refs, otherwise they are not.
+       * 
+ * + * optional bool is_ref = 16; + */ + public boolean getIsRef() { + return isRef_; + } + /** + *
+       * For inputs: if true, the inputs are required to be refs.
+       *   By default, inputs can be either refs or non-refs.
+       * For outputs: if true, outputs are refs, otherwise they are not.
+       * 
+ * + * optional bool is_ref = 16; + */ + public Builder setIsRef(boolean value) { + + isRef_ = value; + onChanged(); + return this; + } + /** + *
+       * For inputs: if true, the inputs are required to be refs.
+       *   By default, inputs can be either refs or non-refs.
+       * For outputs: if true, outputs are refs, otherwise they are not.
+       * 
+ * + * optional bool is_ref = 16; + */ + public Builder clearIsRef() { + + isRef_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.OpDef.ArgDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.OpDef.ArgDef) + private static final org.tensorflow.framework.OpDef.ArgDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.OpDef.ArgDef(); + } + + public static org.tensorflow.framework.OpDef.ArgDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ArgDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ArgDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.OpDef.ArgDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface AttrDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.OpDef.AttrDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * A descriptive name for the argument.  May be used, e.g. by the
+     * Python client, as a keyword argument name, and so should match
+     * the regexp "[a-z][a-z0-9_]+".
+     * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+     * A descriptive name for the argument.  May be used, e.g. by the
+     * Python client, as a keyword argument name, and so should match
+     * the regexp "[a-z][a-z0-9_]+".
+     * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * One of the type names from attr_value.proto ("string", "list(string)",
+     * "int", etc.).
+     * 
+ * + * optional string type = 2; + */ + java.lang.String getType(); + /** + *
+     * One of the type names from attr_value.proto ("string", "list(string)",
+     * "int", etc.).
+     * 
+ * + * optional string type = 2; + */ + com.google.protobuf.ByteString + getTypeBytes(); + + /** + *
+     * A reasonable default for this attribute if the user does not supply
+     * a value.  If not specified, the user must supply a value.
+     * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + boolean hasDefaultValue(); + /** + *
+     * A reasonable default for this attribute if the user does not supply
+     * a value.  If not specified, the user must supply a value.
+     * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + org.tensorflow.framework.AttrValue getDefaultValue(); + /** + *
+     * A reasonable default for this attribute if the user does not supply
+     * a value.  If not specified, the user must supply a value.
+     * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + org.tensorflow.framework.AttrValueOrBuilder getDefaultValueOrBuilder(); + + /** + *
+     * Human-readable description.
+     * 
+ * + * optional string description = 4; + */ + java.lang.String getDescription(); + /** + *
+     * Human-readable description.
+     * 
+ * + * optional string description = 4; + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + + /** + *
+     * For type == "int", this is a minimum value.  For "list(___)"
+     * types, this is the minimum length.
+     * 
+ * + * optional bool has_minimum = 5; + */ + boolean getHasMinimum(); + + /** + * optional int64 minimum = 6; + */ + long getMinimum(); + + /** + *
+     * The set of allowed values.  Has type that is the "list" version
+     * of the "type" field above (uses the "list" field of AttrValue).
+     * If type == "type" or "list(type)" above, then the "type" field
+     * of "allowed_values.list" has the set of allowed DataTypes.
+     * If type == "string" or "list(string)", then the "s" field of
+     * "allowed_values.list" has the set of allowed strings.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + boolean hasAllowedValues(); + /** + *
+     * The set of allowed values.  Has type that is the "list" version
+     * of the "type" field above (uses the "list" field of AttrValue).
+     * If type == "type" or "list(type)" above, then the "type" field
+     * of "allowed_values.list" has the set of allowed DataTypes.
+     * If type == "string" or "list(string)", then the "s" field of
+     * "allowed_values.list" has the set of allowed strings.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + org.tensorflow.framework.AttrValue getAllowedValues(); + /** + *
+     * The set of allowed values.  Has type that is the "list" version
+     * of the "type" field above (uses the "list" field of AttrValue).
+     * If type == "type" or "list(type)" above, then the "type" field
+     * of "allowed_values.list" has the set of allowed DataTypes.
+     * If type == "string" or "list(string)", then the "s" field of
+     * "allowed_values.list" has the set of allowed strings.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + org.tensorflow.framework.AttrValueOrBuilder getAllowedValuesOrBuilder(); + } + /** + *
+   * Description of the graph-construction-time configuration of this
+   * Op.  That is to say, this describes the attr fields that will
+   * be specified in the NodeDef.
+   * 
+ * + * Protobuf type {@code tensorflow.OpDef.AttrDef} + */ + public static final class AttrDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.OpDef.AttrDef) + AttrDefOrBuilder { + // Use AttrDef.newBuilder() to construct. + private AttrDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AttrDef() { + name_ = ""; + type_ = ""; + description_ = ""; + hasMinimum_ = false; + minimum_ = 0L; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private AttrDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + type_ = s; + break; + } + case 26: { + org.tensorflow.framework.AttrValue.Builder subBuilder = null; + if (defaultValue_ != null) { + subBuilder = defaultValue_.toBuilder(); + } + defaultValue_ = input.readMessage(org.tensorflow.framework.AttrValue.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(defaultValue_); + defaultValue_ = subBuilder.buildPartial(); + } + + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + description_ = s; + break; + } + case 40: { + + hasMinimum_ = input.readBool(); + break; + } + case 48: { + + minimum_ = input.readInt64(); + break; + } + case 58: { + org.tensorflow.framework.AttrValue.Builder subBuilder = null; + if 
(allowedValues_ != null) { + subBuilder = allowedValues_.toBuilder(); + } + allowedValues_ = input.readMessage(org.tensorflow.framework.AttrValue.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(allowedValues_); + allowedValues_ = subBuilder.buildPartial(); + } + + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDef.AttrDef.class, org.tensorflow.framework.OpDef.AttrDef.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+     * A descriptive name for the argument.  May be used, e.g. by the
+     * Python client, as a keyword argument name, and so should match
+     * the regexp "[a-z][a-z0-9_]+".
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * A descriptive name for the argument.  May be used, e.g. by the
+     * Python client, as a keyword argument name, and so should match
+     * the regexp "[a-z][a-z0-9_]+".
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TYPE_FIELD_NUMBER = 2; + private volatile java.lang.Object type_; + /** + *
+     * One of the type names from attr_value.proto ("string", "list(string)",
+     * "int", etc.).
+     * 
+ * + * optional string type = 2; + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } + } + /** + *
+     * One of the type names from attr_value.proto ("string", "list(string)",
+     * "int", etc.).
+     * 
+ * + * optional string type = 2; + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DEFAULT_VALUE_FIELD_NUMBER = 3; + private org.tensorflow.framework.AttrValue defaultValue_; + /** + *
+     * A reasonable default for this attribute if the user does not supply
+     * a value.  If not specified, the user must supply a value.
+     * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public boolean hasDefaultValue() { + return defaultValue_ != null; + } + /** + *
+     * A reasonable default for this attribute if the user does not supply
+     * a value.  If not specified, the user must supply a value.
+     * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public org.tensorflow.framework.AttrValue getDefaultValue() { + return defaultValue_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : defaultValue_; + } + /** + *
+     * A reasonable default for this attribute if the user does not supply
+     * a value.  If not specified, the user must supply a value.
+     * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public org.tensorflow.framework.AttrValueOrBuilder getDefaultValueOrBuilder() { + return getDefaultValue(); + } + + public static final int DESCRIPTION_FIELD_NUMBER = 4; + private volatile java.lang.Object description_; + /** + *
+     * Human-readable description.
+     * 
+ * + * optional string description = 4; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + *
+     * Human-readable description.
+     * 
+ * + * optional string description = 4; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int HAS_MINIMUM_FIELD_NUMBER = 5; + private boolean hasMinimum_; + /** + *
+     * For type == "int", this is a minimum value.  For "list(___)"
+     * types, this is the minimum length.
+     * 
+ * + * optional bool has_minimum = 5; + */ + public boolean getHasMinimum() { + return hasMinimum_; + } + + public static final int MINIMUM_FIELD_NUMBER = 6; + private long minimum_; + /** + * optional int64 minimum = 6; + */ + public long getMinimum() { + return minimum_; + } + + public static final int ALLOWED_VALUES_FIELD_NUMBER = 7; + private org.tensorflow.framework.AttrValue allowedValues_; + /** + *
+     * The set of allowed values.  Has type that is the "list" version
+     * of the "type" field above (uses the "list" field of AttrValue).
+     * If type == "type" or "list(type)" above, then the "type" field
+     * of "allowed_values.list" has the set of allowed DataTypes.
+     * If type == "string" or "list(string)", then the "s" field of
+     * "allowed_values.list" has the set of allowed strings.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public boolean hasAllowedValues() { + return allowedValues_ != null; + } + /** + *
+     * The set of allowed values.  Has type that is the "list" version
+     * of the "type" field above (uses the "list" field of AttrValue).
+     * If type == "type" or "list(type)" above, then the "type" field
+     * of "allowed_values.list" has the set of allowed DataTypes.
+     * If type == "string" or "list(string)", then the "s" field of
+     * "allowed_values.list" has the set of allowed strings.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public org.tensorflow.framework.AttrValue getAllowedValues() { + return allowedValues_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : allowedValues_; + } + /** + *
+     * The set of allowed values.  Has type that is the "list" version
+     * of the "type" field above (uses the "list" field of AttrValue).
+     * If type == "type" or "list(type)" above, then the "type" field
+     * of "allowed_values.list" has the set of allowed DataTypes.
+     * If type == "string" or "list(string)", then the "s" field of
+     * "allowed_values.list" has the set of allowed strings.
+     * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public org.tensorflow.framework.AttrValueOrBuilder getAllowedValuesOrBuilder() { + return getAllowedValues(); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, type_); + } + if (defaultValue_ != null) { + output.writeMessage(3, getDefaultValue()); + } + if (!getDescriptionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, description_); + } + if (hasMinimum_ != false) { + output.writeBool(5, hasMinimum_); + } + if (minimum_ != 0L) { + output.writeInt64(6, minimum_); + } + if (allowedValues_ != null) { + output.writeMessage(7, getAllowedValues()); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (!getTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, type_); + } + if (defaultValue_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getDefaultValue()); + } + if (!getDescriptionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, description_); + } + if (hasMinimum_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, hasMinimum_); + } + if (minimum_ != 0L) { + size += com.google.protobuf.CodedOutputStream + 
.computeInt64Size(6, minimum_); + } + if (allowedValues_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, getAllowedValues()); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.OpDef.AttrDef)) { + return super.equals(obj); + } + org.tensorflow.framework.OpDef.AttrDef other = (org.tensorflow.framework.OpDef.AttrDef) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getType() + .equals(other.getType()); + result = result && (hasDefaultValue() == other.hasDefaultValue()); + if (hasDefaultValue()) { + result = result && getDefaultValue() + .equals(other.getDefaultValue()); + } + result = result && getDescription() + .equals(other.getDescription()); + result = result && (getHasMinimum() + == other.getHasMinimum()); + result = result && (getMinimum() + == other.getMinimum()); + result = result && (hasAllowedValues() == other.hasAllowedValues()); + if (hasAllowedValues()) { + result = result && getAllowedValues() + .equals(other.getAllowedValues()); + } + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + if (hasDefaultValue()) { + hash = (37 * hash) + DEFAULT_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getDefaultValue().hashCode(); + } + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + hash = (37 * hash) + HAS_MINIMUM_FIELD_NUMBER; + hash = (53 * hash) + 
com.google.protobuf.Internal.hashBoolean( + getHasMinimum()); + hash = (37 * hash) + MINIMUM_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getMinimum()); + if (hasAllowedValues()) { + hash = (37 * hash) + ALLOWED_VALUES_FIELD_NUMBER; + hash = (53 * hash) + getAllowedValues().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.OpDef.AttrDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.AttrDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + 
return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef.AttrDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef.AttrDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.OpDef.AttrDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Description of the graph-construction-time configuration of this
+     * Op.  That is to say, this describes the attr fields that will
+     * be specified in the NodeDef.
+     * 
+ * + * Protobuf type {@code tensorflow.OpDef.AttrDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.OpDef.AttrDef) + org.tensorflow.framework.OpDef.AttrDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDef.AttrDef.class, org.tensorflow.framework.OpDef.AttrDef.Builder.class); + } + + // Construct using org.tensorflow.framework.OpDef.AttrDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + type_ = ""; + + if (defaultValueBuilder_ == null) { + defaultValue_ = null; + } else { + defaultValue_ = null; + defaultValueBuilder_ = null; + } + description_ = ""; + + hasMinimum_ = false; + + minimum_ = 0L; + + if (allowedValuesBuilder_ == null) { + allowedValues_ = null; + } else { + allowedValues_ = null; + allowedValuesBuilder_ = null; + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_AttrDef_descriptor; + } + + public org.tensorflow.framework.OpDef.AttrDef getDefaultInstanceForType() { + return 
org.tensorflow.framework.OpDef.AttrDef.getDefaultInstance(); + } + + public org.tensorflow.framework.OpDef.AttrDef build() { + org.tensorflow.framework.OpDef.AttrDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.OpDef.AttrDef buildPartial() { + org.tensorflow.framework.OpDef.AttrDef result = new org.tensorflow.framework.OpDef.AttrDef(this); + result.name_ = name_; + result.type_ = type_; + if (defaultValueBuilder_ == null) { + result.defaultValue_ = defaultValue_; + } else { + result.defaultValue_ = defaultValueBuilder_.build(); + } + result.description_ = description_; + result.hasMinimum_ = hasMinimum_; + result.minimum_ = minimum_; + if (allowedValuesBuilder_ == null) { + result.allowedValues_ = allowedValues_; + } else { + result.allowedValues_ = allowedValuesBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.OpDef.AttrDef) { + return 
mergeFrom((org.tensorflow.framework.OpDef.AttrDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.OpDef.AttrDef other) { + if (other == org.tensorflow.framework.OpDef.AttrDef.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getType().isEmpty()) { + type_ = other.type_; + onChanged(); + } + if (other.hasDefaultValue()) { + mergeDefaultValue(other.getDefaultValue()); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + onChanged(); + } + if (other.getHasMinimum() != false) { + setHasMinimum(other.getHasMinimum()); + } + if (other.getMinimum() != 0L) { + setMinimum(other.getMinimum()); + } + if (other.hasAllowedValues()) { + mergeAllowedValues(other.getAllowedValues()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.OpDef.AttrDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.OpDef.AttrDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+       * A descriptive name for the argument.  May be used, e.g. by the
+       * Python client, as a keyword argument name, and so should match
+       * the regexp "[a-z][a-z0-9_]+".
+       * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * A descriptive name for the argument.  May be used, e.g. by the
+       * Python client, as a keyword argument name, and so should match
+       * the regexp "[a-z][a-z0-9_]+".
+       * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * A descriptive name for the argument.  May be used, e.g. by the
+       * Python client, as a keyword argument name, and so should match
+       * the regexp "[a-z][a-z0-9_]+".
+       * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+       * A descriptive name for the argument.  May be used, e.g. by the
+       * Python client, as a keyword argument name, and so should match
+       * the regexp "[a-z][a-z0-9_]+".
+       * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+       * A descriptive name for the argument.  May be used, e.g. by the
+       * Python client, as a keyword argument name, and so should match
+       * the regexp "[a-z][a-z0-9_]+".
+       * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object type_ = ""; + /** + *
+       * One of the type names from attr_value.proto ("string", "list(string)",
+       * "int", etc.).
+       * 
+ * + * optional string type = 2; + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * One of the type names from attr_value.proto ("string", "list(string)",
+       * "int", etc.).
+       * 
+ * + * optional string type = 2; + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * One of the type names from attr_value.proto ("string", "list(string)",
+       * "int", etc.).
+       * 
+ * + * optional string type = 2; + */ + public Builder setType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + type_ = value; + onChanged(); + return this; + } + /** + *
+       * One of the type names from attr_value.proto ("string", "list(string)",
+       * "int", etc.).
+       * 
+ * + * optional string type = 2; + */ + public Builder clearType() { + + type_ = getDefaultInstance().getType(); + onChanged(); + return this; + } + /** + *
+       * One of the type names from attr_value.proto ("string", "list(string)",
+       * "int", etc.).
+       * 
+ * + * optional string type = 2; + */ + public Builder setTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + type_ = value; + onChanged(); + return this; + } + + private org.tensorflow.framework.AttrValue defaultValue_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder> defaultValueBuilder_; + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public boolean hasDefaultValue() { + return defaultValueBuilder_ != null || defaultValue_ != null; + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public org.tensorflow.framework.AttrValue getDefaultValue() { + if (defaultValueBuilder_ == null) { + return defaultValue_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : defaultValue_; + } else { + return defaultValueBuilder_.getMessage(); + } + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public Builder setDefaultValue(org.tensorflow.framework.AttrValue value) { + if (defaultValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + defaultValue_ = value; + onChanged(); + } else { + defaultValueBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public Builder setDefaultValue( + org.tensorflow.framework.AttrValue.Builder builderForValue) { + if (defaultValueBuilder_ == null) { + defaultValue_ = builderForValue.build(); + onChanged(); + } else { + defaultValueBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public Builder mergeDefaultValue(org.tensorflow.framework.AttrValue value) { + if (defaultValueBuilder_ == null) { + if (defaultValue_ != null) { + defaultValue_ = + org.tensorflow.framework.AttrValue.newBuilder(defaultValue_).mergeFrom(value).buildPartial(); + } else { + defaultValue_ = value; + } + onChanged(); + } else { + defaultValueBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public Builder clearDefaultValue() { + if (defaultValueBuilder_ == null) { + defaultValue_ = null; + onChanged(); + } else { + defaultValue_ = null; + defaultValueBuilder_ = null; + } + + return this; + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public org.tensorflow.framework.AttrValue.Builder getDefaultValueBuilder() { + + onChanged(); + return getDefaultValueFieldBuilder().getBuilder(); + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + public org.tensorflow.framework.AttrValueOrBuilder getDefaultValueOrBuilder() { + if (defaultValueBuilder_ != null) { + return defaultValueBuilder_.getMessageOrBuilder(); + } else { + return defaultValue_ == null ? + org.tensorflow.framework.AttrValue.getDefaultInstance() : defaultValue_; + } + } + /** + *
+       * A reasonable default for this attribute if the user does not supply
+       * a value.  If not specified, the user must supply a value.
+       * 
+ * + * optional .tensorflow.AttrValue default_value = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder> + getDefaultValueFieldBuilder() { + if (defaultValueBuilder_ == null) { + defaultValueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder>( + getDefaultValue(), + getParentForChildren(), + isClean()); + defaultValue_ = null; + } + return defaultValueBuilder_; + } + + private java.lang.Object description_ = ""; + /** + *
+       * Human-readable description.
+       * 
+ * + * optional string description = 4; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Human-readable description.
+       * 
+ * + * optional string description = 4; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Human-readable description.
+       * 
+ * + * optional string description = 4; + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + description_ = value; + onChanged(); + return this; + } + /** + *
+       * Human-readable description.
+       * 
+ * + * optional string description = 4; + */ + public Builder clearDescription() { + + description_ = getDefaultInstance().getDescription(); + onChanged(); + return this; + } + /** + *
+       * Human-readable description.
+       * 
+ * + * optional string description = 4; + */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + description_ = value; + onChanged(); + return this; + } + + private boolean hasMinimum_ ; + /** + *
+       * For type == "int", this is a minimum value.  For "list(___)"
+       * types, this is the minimum length.
+       * 
+ * + * optional bool has_minimum = 5; + */ + public boolean getHasMinimum() { + return hasMinimum_; + } + /** + *
+       * For type == "int", this is a minimum value.  For "list(___)"
+       * types, this is the minimum length.
+       * 
+ * + * optional bool has_minimum = 5; + */ + public Builder setHasMinimum(boolean value) { + + hasMinimum_ = value; + onChanged(); + return this; + } + /** + *
+       * For type == "int", this is a minimum value.  For "list(___)"
+       * types, this is the minimum length.
+       * 
+ * + * optional bool has_minimum = 5; + */ + public Builder clearHasMinimum() { + + hasMinimum_ = false; + onChanged(); + return this; + } + + private long minimum_ ; + /** + * optional int64 minimum = 6; + */ + public long getMinimum() { + return minimum_; + } + /** + * optional int64 minimum = 6; + */ + public Builder setMinimum(long value) { + + minimum_ = value; + onChanged(); + return this; + } + /** + * optional int64 minimum = 6; + */ + public Builder clearMinimum() { + + minimum_ = 0L; + onChanged(); + return this; + } + + private org.tensorflow.framework.AttrValue allowedValues_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder> allowedValuesBuilder_; + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public boolean hasAllowedValues() { + return allowedValuesBuilder_ != null || allowedValues_ != null; + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public org.tensorflow.framework.AttrValue getAllowedValues() { + if (allowedValuesBuilder_ == null) { + return allowedValues_ == null ? org.tensorflow.framework.AttrValue.getDefaultInstance() : allowedValues_; + } else { + return allowedValuesBuilder_.getMessage(); + } + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public Builder setAllowedValues(org.tensorflow.framework.AttrValue value) { + if (allowedValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + allowedValues_ = value; + onChanged(); + } else { + allowedValuesBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public Builder setAllowedValues( + org.tensorflow.framework.AttrValue.Builder builderForValue) { + if (allowedValuesBuilder_ == null) { + allowedValues_ = builderForValue.build(); + onChanged(); + } else { + allowedValuesBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public Builder mergeAllowedValues(org.tensorflow.framework.AttrValue value) { + if (allowedValuesBuilder_ == null) { + if (allowedValues_ != null) { + allowedValues_ = + org.tensorflow.framework.AttrValue.newBuilder(allowedValues_).mergeFrom(value).buildPartial(); + } else { + allowedValues_ = value; + } + onChanged(); + } else { + allowedValuesBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public Builder clearAllowedValues() { + if (allowedValuesBuilder_ == null) { + allowedValues_ = null; + onChanged(); + } else { + allowedValues_ = null; + allowedValuesBuilder_ = null; + } + + return this; + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public org.tensorflow.framework.AttrValue.Builder getAllowedValuesBuilder() { + + onChanged(); + return getAllowedValuesFieldBuilder().getBuilder(); + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + public org.tensorflow.framework.AttrValueOrBuilder getAllowedValuesOrBuilder() { + if (allowedValuesBuilder_ != null) { + return allowedValuesBuilder_.getMessageOrBuilder(); + } else { + return allowedValues_ == null ? + org.tensorflow.framework.AttrValue.getDefaultInstance() : allowedValues_; + } + } + /** + *
+       * The set of allowed values.  Has type that is the "list" version
+       * of the "type" field above (uses the "list" field of AttrValue).
+       * If type == "type" or "list(type)" above, then the "type" field
+       * of "allowed_values.list" has the set of allowed DataTypes.
+       * If type == "string" or "list(string)", then the "s" field of
+       * "allowed_values.list" has the set of allowed strings.
+       * 
+ * + * optional .tensorflow.AttrValue allowed_values = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder> + getAllowedValuesFieldBuilder() { + if (allowedValuesBuilder_ == null) { + allowedValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.AttrValue, org.tensorflow.framework.AttrValue.Builder, org.tensorflow.framework.AttrValueOrBuilder>( + getAllowedValues(), + getParentForChildren(), + isClean()); + allowedValues_ = null; + } + return allowedValuesBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.OpDef.AttrDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.OpDef.AttrDef) + private static final org.tensorflow.framework.OpDef.AttrDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.OpDef.AttrDef(); + } + + public static org.tensorflow.framework.OpDef.AttrDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public AttrDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AttrDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.OpDef.AttrDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int 
bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + *
+   * Op names starting with an underscore are reserved for internal use.
+   * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+   * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Op names starting with an underscore are reserved for internal use.
+   * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+   * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INPUT_ARG_FIELD_NUMBER = 2; + private java.util.List inputArg_; + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public java.util.List getInputArgList() { + return inputArg_; + } + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public java.util.List + getInputArgOrBuilderList() { + return inputArg_; + } + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public int getInputArgCount() { + return inputArg_.size(); + } + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDef getInputArg(int index) { + return inputArg_.get(index); + } + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDefOrBuilder getInputArgOrBuilder( + int index) { + return inputArg_.get(index); + } + + public static final int OUTPUT_ARG_FIELD_NUMBER = 3; + private java.util.List outputArg_; + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public java.util.List getOutputArgList() { + return outputArg_; + } + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public java.util.List + getOutputArgOrBuilderList() { + return outputArg_; + } + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public int getOutputArgCount() { + return outputArg_.size(); + } + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDef getOutputArg(int index) { + return outputArg_.get(index); + } + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDefOrBuilder getOutputArgOrBuilder( + int index) { + return outputArg_.get(index); + } + + public static final int ATTR_FIELD_NUMBER = 4; + private java.util.List attr_; + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public java.util.List getAttrList() { + return attr_; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public java.util.List + getAttrOrBuilderList() { + return attr_; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public int getAttrCount() { + return attr_.size(); + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDef getAttr(int index) { + return attr_.get(index); + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDefOrBuilder getAttrOrBuilder( + int index) { + return attr_.get(index); + } + + public static final int DEPRECATION_FIELD_NUMBER = 8; + private org.tensorflow.framework.OpDeprecation deprecation_; + /** + *
+   * Optional deprecation based on GraphDef versions.
+   * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public boolean hasDeprecation() { + return deprecation_ != null; + } + /** + *
+   * Optional deprecation based on GraphDef versions.
+   * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public org.tensorflow.framework.OpDeprecation getDeprecation() { + return deprecation_ == null ? org.tensorflow.framework.OpDeprecation.getDefaultInstance() : deprecation_; + } + /** + *
+   * Optional deprecation based on GraphDef versions.
+   * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public org.tensorflow.framework.OpDeprecationOrBuilder getDeprecationOrBuilder() { + return getDeprecation(); + } + + public static final int SUMMARY_FIELD_NUMBER = 5; + private volatile java.lang.Object summary_; + /** + *
+   * One-line human-readable description of what the Op does.
+   * 
+ * + * optional string summary = 5; + */ + public java.lang.String getSummary() { + java.lang.Object ref = summary_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + summary_ = s; + return s; + } + } + /** + *
+   * One-line human-readable description of what the Op does.
+   * 
+ * + * optional string summary = 5; + */ + public com.google.protobuf.ByteString + getSummaryBytes() { + java.lang.Object ref = summary_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + summary_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DESCRIPTION_FIELD_NUMBER = 6; + private volatile java.lang.Object description_; + /** + *
+   * Additional, longer human-readable description of what the Op does.
+   * 
+ * + * optional string description = 6; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + *
+   * Additional, longer human-readable description of what the Op does.
+   * 
+ * + * optional string description = 6; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int IS_COMMUTATIVE_FIELD_NUMBER = 18; + private boolean isCommutative_; + /** + *
+   * True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
+   * 
+ * + * optional bool is_commutative = 18; + */ + public boolean getIsCommutative() { + return isCommutative_; + } + + public static final int IS_AGGREGATE_FIELD_NUMBER = 16; + private boolean isAggregate_; + /** + *
+   * If is_aggregate is true, then this operation accepts N >= 2
+   * inputs and produces 1 output all of the same type.  Should be
+   * associative and commutative, and produce output with the same
+   * shape as the input.  The optimizer may replace an aggregate op
+   * taking input from multiple devices with a tree of aggregate ops
+   * that aggregate locally within each device (and possibly within
+   * groups of nearby devices) before communicating.
+   * TODO(josh11b): Implement that optimization.
+   * 
+ * + * optional bool is_aggregate = 16; + */ + public boolean getIsAggregate() { + return isAggregate_; + } + + public static final int IS_STATEFUL_FIELD_NUMBER = 17; + private boolean isStateful_; + /** + *
+   * By default Ops may be moved between devices.  Stateful ops should
+   * either not be moved, or should only be moved if that state can also
+   * be moved (e.g. via some sort of save / restore).
+   * Stateful ops are guaranteed to never be optimized away by Common
+   * Subexpression Elimination (CSE).
+   * 
+ * + * optional bool is_stateful = 17; + */ + public boolean getIsStateful() { + return isStateful_; + } + + public static final int ALLOWS_UNINITIALIZED_INPUT_FIELD_NUMBER = 19; + private boolean allowsUninitializedInput_; + /** + *
+   * By default, all inputs to an Op must be initialized Tensors.  Ops
+   * that may initialize tensors for the first time should set this
+   * field to true, to allow the Op to take an uninitialized Tensor as
+   * input.
+   * 
+ * + * optional bool allows_uninitialized_input = 19; + */ + public boolean getAllowsUninitializedInput() { + return allowsUninitializedInput_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + for (int i = 0; i < inputArg_.size(); i++) { + output.writeMessage(2, inputArg_.get(i)); + } + for (int i = 0; i < outputArg_.size(); i++) { + output.writeMessage(3, outputArg_.get(i)); + } + for (int i = 0; i < attr_.size(); i++) { + output.writeMessage(4, attr_.get(i)); + } + if (!getSummaryBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, summary_); + } + if (!getDescriptionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, description_); + } + if (deprecation_ != null) { + output.writeMessage(8, getDeprecation()); + } + if (isAggregate_ != false) { + output.writeBool(16, isAggregate_); + } + if (isStateful_ != false) { + output.writeBool(17, isStateful_); + } + if (isCommutative_ != false) { + output.writeBool(18, isCommutative_); + } + if (allowsUninitializedInput_ != false) { + output.writeBool(19, allowsUninitializedInput_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + for (int i = 0; i < inputArg_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, inputArg_.get(i)); + } + for (int i = 0; i < outputArg_.size(); i++) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(3, outputArg_.get(i)); + } + for (int i = 0; i < attr_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, attr_.get(i)); + } + if (!getSummaryBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, summary_); + } + if (!getDescriptionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, description_); + } + if (deprecation_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, getDeprecation()); + } + if (isAggregate_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(16, isAggregate_); + } + if (isStateful_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(17, isStateful_); + } + if (isCommutative_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(18, isCommutative_); + } + if (allowsUninitializedInput_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(19, allowsUninitializedInput_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.OpDef)) { + return super.equals(obj); + } + org.tensorflow.framework.OpDef other = (org.tensorflow.framework.OpDef) obj; + + boolean result = true; + result = result && getName() + .equals(other.getName()); + result = result && getInputArgList() + .equals(other.getInputArgList()); + result = result && getOutputArgList() + .equals(other.getOutputArgList()); + result = result && getAttrList() + .equals(other.getAttrList()); + result = result && (hasDeprecation() == other.hasDeprecation()); + if (hasDeprecation()) { + result = result && getDeprecation() + .equals(other.getDeprecation()); + } + result = result && 
getSummary() + .equals(other.getSummary()); + result = result && getDescription() + .equals(other.getDescription()); + result = result && (getIsCommutative() + == other.getIsCommutative()); + result = result && (getIsAggregate() + == other.getIsAggregate()); + result = result && (getIsStateful() + == other.getIsStateful()); + result = result && (getAllowsUninitializedInput() + == other.getAllowsUninitializedInput()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (getInputArgCount() > 0) { + hash = (37 * hash) + INPUT_ARG_FIELD_NUMBER; + hash = (53 * hash) + getInputArgList().hashCode(); + } + if (getOutputArgCount() > 0) { + hash = (37 * hash) + OUTPUT_ARG_FIELD_NUMBER; + hash = (53 * hash) + getOutputArgList().hashCode(); + } + if (getAttrCount() > 0) { + hash = (37 * hash) + ATTR_FIELD_NUMBER; + hash = (53 * hash) + getAttrList().hashCode(); + } + if (hasDeprecation()) { + hash = (37 * hash) + DEPRECATION_FIELD_NUMBER; + hash = (53 * hash) + getDeprecation().hashCode(); + } + hash = (37 * hash) + SUMMARY_FIELD_NUMBER; + hash = (53 * hash) + getSummary().hashCode(); + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + hash = (37 * hash) + IS_COMMUTATIVE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsCommutative()); + hash = (37 * hash) + IS_AGGREGATE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsAggregate()); + hash = (37 * hash) + IS_STATEFUL_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIsStateful()); + hash = (37 * hash) + ALLOWS_UNINITIALIZED_INPUT_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + 
getAllowsUninitializedInput()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.OpDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.OpDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Defines an operation. A NodeDef in a GraphDef specifies an Op by
+   * using the "op" field which should match the name of a OpDef.
+   * 
+ * + * Protobuf type {@code tensorflow.OpDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.OpDef) + org.tensorflow.framework.OpDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDef.class, org.tensorflow.framework.OpDef.Builder.class); + } + + // Construct using org.tensorflow.framework.OpDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getInputArgFieldBuilder(); + getOutputArgFieldBuilder(); + getAttrFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + name_ = ""; + + if (inputArgBuilder_ == null) { + inputArg_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + inputArgBuilder_.clear(); + } + if (outputArgBuilder_ == null) { + outputArg_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + outputArgBuilder_.clear(); + } + if (attrBuilder_ == null) { + attr_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + attrBuilder_.clear(); + } + if (deprecationBuilder_ == null) { + deprecation_ = null; + } else { + deprecation_ = null; + deprecationBuilder_ = null; + } + summary_ = ""; + + description_ = ""; + 
+ isCommutative_ = false; + + isAggregate_ = false; + + isStateful_ = false; + + allowsUninitializedInput_ = false; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDef_descriptor; + } + + public org.tensorflow.framework.OpDef getDefaultInstanceForType() { + return org.tensorflow.framework.OpDef.getDefaultInstance(); + } + + public org.tensorflow.framework.OpDef build() { + org.tensorflow.framework.OpDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.OpDef buildPartial() { + org.tensorflow.framework.OpDef result = new org.tensorflow.framework.OpDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.name_ = name_; + if (inputArgBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + inputArg_ = java.util.Collections.unmodifiableList(inputArg_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.inputArg_ = inputArg_; + } else { + result.inputArg_ = inputArgBuilder_.build(); + } + if (outputArgBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + outputArg_ = java.util.Collections.unmodifiableList(outputArg_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.outputArg_ = outputArg_; + } else { + result.outputArg_ = outputArgBuilder_.build(); + } + if (attrBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + attr_ = java.util.Collections.unmodifiableList(attr_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.attr_ = attr_; + } else { + result.attr_ = attrBuilder_.build(); + } + if (deprecationBuilder_ == null) { + result.deprecation_ = deprecation_; + } else { + result.deprecation_ = deprecationBuilder_.build(); + } + result.summary_ = summary_; + result.description_ = description_; + result.isCommutative_ = 
isCommutative_; + result.isAggregate_ = isAggregate_; + result.isStateful_ = isStateful_; + result.allowsUninitializedInput_ = allowsUninitializedInput_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.OpDef) { + return mergeFrom((org.tensorflow.framework.OpDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.OpDef other) { + if (other == org.tensorflow.framework.OpDef.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (inputArgBuilder_ == null) { + if (!other.inputArg_.isEmpty()) { + if (inputArg_.isEmpty()) { + inputArg_ = other.inputArg_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureInputArgIsMutable(); + inputArg_.addAll(other.inputArg_); + } + onChanged(); + } + } else { + if (!other.inputArg_.isEmpty()) { + if (inputArgBuilder_.isEmpty()) { + inputArgBuilder_.dispose(); + inputArgBuilder_ = null; + inputArg_ = other.inputArg_; + 
bitField0_ = (bitField0_ & ~0x00000002); + inputArgBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getInputArgFieldBuilder() : null; + } else { + inputArgBuilder_.addAllMessages(other.inputArg_); + } + } + } + if (outputArgBuilder_ == null) { + if (!other.outputArg_.isEmpty()) { + if (outputArg_.isEmpty()) { + outputArg_ = other.outputArg_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureOutputArgIsMutable(); + outputArg_.addAll(other.outputArg_); + } + onChanged(); + } + } else { + if (!other.outputArg_.isEmpty()) { + if (outputArgBuilder_.isEmpty()) { + outputArgBuilder_.dispose(); + outputArgBuilder_ = null; + outputArg_ = other.outputArg_; + bitField0_ = (bitField0_ & ~0x00000004); + outputArgBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getOutputArgFieldBuilder() : null; + } else { + outputArgBuilder_.addAllMessages(other.outputArg_); + } + } + } + if (attrBuilder_ == null) { + if (!other.attr_.isEmpty()) { + if (attr_.isEmpty()) { + attr_ = other.attr_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureAttrIsMutable(); + attr_.addAll(other.attr_); + } + onChanged(); + } + } else { + if (!other.attr_.isEmpty()) { + if (attrBuilder_.isEmpty()) { + attrBuilder_.dispose(); + attrBuilder_ = null; + attr_ = other.attr_; + bitField0_ = (bitField0_ & ~0x00000008); + attrBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getAttrFieldBuilder() : null; + } else { + attrBuilder_.addAllMessages(other.attr_); + } + } + } + if (other.hasDeprecation()) { + mergeDeprecation(other.getDeprecation()); + } + if (!other.getSummary().isEmpty()) { + summary_ = other.summary_; + onChanged(); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + onChanged(); + } + if (other.getIsCommutative() != false) { + setIsCommutative(other.getIsCommutative()); + } + if (other.getIsAggregate() != false) { + setIsAggregate(other.getIsAggregate()); + } + if (other.getIsStateful() != false) { + setIsStateful(other.getIsStateful()); + } + if (other.getAllowsUninitializedInput() != false) { + setAllowsUninitializedInput(other.getAllowsUninitializedInput()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.OpDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.OpDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+     * Op names starting with an underscore are reserved for internal use.
+     * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Op names starting with an underscore are reserved for internal use.
+     * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Op names starting with an underscore are reserved for internal use.
+     * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Op names starting with an underscore are reserved for internal use.
+     * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Op names starting with an underscore are reserved for internal use.
+     * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+     * 
+ * + * optional string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.util.List inputArg_ = + java.util.Collections.emptyList(); + private void ensureInputArgIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + inputArg_ = new java.util.ArrayList(inputArg_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.ArgDef, org.tensorflow.framework.OpDef.ArgDef.Builder, org.tensorflow.framework.OpDef.ArgDefOrBuilder> inputArgBuilder_; + + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public java.util.List getInputArgList() { + if (inputArgBuilder_ == null) { + return java.util.Collections.unmodifiableList(inputArg_); + } else { + return inputArgBuilder_.getMessageList(); + } + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public int getInputArgCount() { + if (inputArgBuilder_ == null) { + return inputArg_.size(); + } else { + return inputArgBuilder_.getCount(); + } + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDef getInputArg(int index) { + if (inputArgBuilder_ == null) { + return inputArg_.get(index); + } else { + return inputArgBuilder_.getMessage(index); + } + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder setInputArg( + int index, org.tensorflow.framework.OpDef.ArgDef value) { + if (inputArgBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputArgIsMutable(); + inputArg_.set(index, value); + onChanged(); + } else { + inputArgBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder setInputArg( + int index, org.tensorflow.framework.OpDef.ArgDef.Builder builderForValue) { + if (inputArgBuilder_ == null) { + ensureInputArgIsMutable(); + inputArg_.set(index, builderForValue.build()); + onChanged(); + } else { + inputArgBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder addInputArg(org.tensorflow.framework.OpDef.ArgDef value) { + if (inputArgBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputArgIsMutable(); + inputArg_.add(value); + onChanged(); + } else { + inputArgBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder addInputArg( + int index, org.tensorflow.framework.OpDef.ArgDef value) { + if (inputArgBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInputArgIsMutable(); + inputArg_.add(index, value); + onChanged(); + } else { + inputArgBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder addInputArg( + org.tensorflow.framework.OpDef.ArgDef.Builder builderForValue) { + if (inputArgBuilder_ == null) { + ensureInputArgIsMutable(); + inputArg_.add(builderForValue.build()); + onChanged(); + } else { + inputArgBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder addInputArg( + int index, org.tensorflow.framework.OpDef.ArgDef.Builder builderForValue) { + if (inputArgBuilder_ == null) { + ensureInputArgIsMutable(); + inputArg_.add(index, builderForValue.build()); + onChanged(); + } else { + inputArgBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder addAllInputArg( + java.lang.Iterable values) { + if (inputArgBuilder_ == null) { + ensureInputArgIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, inputArg_); + onChanged(); + } else { + inputArgBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder clearInputArg() { + if (inputArgBuilder_ == null) { + inputArg_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + inputArgBuilder_.clear(); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public Builder removeInputArg(int index) { + if (inputArgBuilder_ == null) { + ensureInputArgIsMutable(); + inputArg_.remove(index); + onChanged(); + } else { + inputArgBuilder_.remove(index); + } + return this; + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDef.Builder getInputArgBuilder( + int index) { + return getInputArgFieldBuilder().getBuilder(index); + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDefOrBuilder getInputArgOrBuilder( + int index) { + if (inputArgBuilder_ == null) { + return inputArg_.get(index); } else { + return inputArgBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public java.util.List + getInputArgOrBuilderList() { + if (inputArgBuilder_ != null) { + return inputArgBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(inputArg_); + } + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDef.Builder addInputArgBuilder() { + return getInputArgFieldBuilder().addBuilder( + org.tensorflow.framework.OpDef.ArgDef.getDefaultInstance()); + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public org.tensorflow.framework.OpDef.ArgDef.Builder addInputArgBuilder( + int index) { + return getInputArgFieldBuilder().addBuilder( + index, org.tensorflow.framework.OpDef.ArgDef.getDefaultInstance()); + } + /** + *
+     * Description of the input(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + public java.util.List + getInputArgBuilderList() { + return getInputArgFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.ArgDef, org.tensorflow.framework.OpDef.ArgDef.Builder, org.tensorflow.framework.OpDef.ArgDefOrBuilder> + getInputArgFieldBuilder() { + if (inputArgBuilder_ == null) { + inputArgBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.ArgDef, org.tensorflow.framework.OpDef.ArgDef.Builder, org.tensorflow.framework.OpDef.ArgDefOrBuilder>( + inputArg_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + inputArg_ = null; + } + return inputArgBuilder_; + } + + private java.util.List outputArg_ = + java.util.Collections.emptyList(); + private void ensureOutputArgIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + outputArg_ = new java.util.ArrayList(outputArg_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.ArgDef, org.tensorflow.framework.OpDef.ArgDef.Builder, org.tensorflow.framework.OpDef.ArgDefOrBuilder> outputArgBuilder_; + + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public java.util.List getOutputArgList() { + if (outputArgBuilder_ == null) { + return java.util.Collections.unmodifiableList(outputArg_); + } else { + return outputArgBuilder_.getMessageList(); + } + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public int getOutputArgCount() { + if (outputArgBuilder_ == null) { + return outputArg_.size(); + } else { + return outputArgBuilder_.getCount(); + } + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDef getOutputArg(int index) { + if (outputArgBuilder_ == null) { + return outputArg_.get(index); + } else { + return outputArgBuilder_.getMessage(index); + } + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder setOutputArg( + int index, org.tensorflow.framework.OpDef.ArgDef value) { + if (outputArgBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputArgIsMutable(); + outputArg_.set(index, value); + onChanged(); + } else { + outputArgBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder setOutputArg( + int index, org.tensorflow.framework.OpDef.ArgDef.Builder builderForValue) { + if (outputArgBuilder_ == null) { + ensureOutputArgIsMutable(); + outputArg_.set(index, builderForValue.build()); + onChanged(); + } else { + outputArgBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder addOutputArg(org.tensorflow.framework.OpDef.ArgDef value) { + if (outputArgBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputArgIsMutable(); + outputArg_.add(value); + onChanged(); + } else { + outputArgBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder addOutputArg( + int index, org.tensorflow.framework.OpDef.ArgDef value) { + if (outputArgBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOutputArgIsMutable(); + outputArg_.add(index, value); + onChanged(); + } else { + outputArgBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder addOutputArg( + org.tensorflow.framework.OpDef.ArgDef.Builder builderForValue) { + if (outputArgBuilder_ == null) { + ensureOutputArgIsMutable(); + outputArg_.add(builderForValue.build()); + onChanged(); + } else { + outputArgBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder addOutputArg( + int index, org.tensorflow.framework.OpDef.ArgDef.Builder builderForValue) { + if (outputArgBuilder_ == null) { + ensureOutputArgIsMutable(); + outputArg_.add(index, builderForValue.build()); + onChanged(); + } else { + outputArgBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder addAllOutputArg( + java.lang.Iterable values) { + if (outputArgBuilder_ == null) { + ensureOutputArgIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, outputArg_); + onChanged(); + } else { + outputArgBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder clearOutputArg() { + if (outputArgBuilder_ == null) { + outputArg_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + outputArgBuilder_.clear(); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public Builder removeOutputArg(int index) { + if (outputArgBuilder_ == null) { + ensureOutputArgIsMutable(); + outputArg_.remove(index); + onChanged(); + } else { + outputArgBuilder_.remove(index); + } + return this; + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDef.Builder getOutputArgBuilder( + int index) { + return getOutputArgFieldBuilder().getBuilder(index); + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDefOrBuilder getOutputArgOrBuilder( + int index) { + if (outputArgBuilder_ == null) { + return outputArg_.get(index); } else { + return outputArgBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public java.util.List + getOutputArgOrBuilderList() { + if (outputArgBuilder_ != null) { + return outputArgBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(outputArg_); + } + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDef.Builder addOutputArgBuilder() { + return getOutputArgFieldBuilder().addBuilder( + org.tensorflow.framework.OpDef.ArgDef.getDefaultInstance()); + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public org.tensorflow.framework.OpDef.ArgDef.Builder addOutputArgBuilder( + int index) { + return getOutputArgFieldBuilder().addBuilder( + index, org.tensorflow.framework.OpDef.ArgDef.getDefaultInstance()); + } + /** + *
+     * Description of the output(s).
+     * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + public java.util.List + getOutputArgBuilderList() { + return getOutputArgFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.ArgDef, org.tensorflow.framework.OpDef.ArgDef.Builder, org.tensorflow.framework.OpDef.ArgDefOrBuilder> + getOutputArgFieldBuilder() { + if (outputArgBuilder_ == null) { + outputArgBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.ArgDef, org.tensorflow.framework.OpDef.ArgDef.Builder, org.tensorflow.framework.OpDef.ArgDefOrBuilder>( + outputArg_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + outputArg_ = null; + } + return outputArgBuilder_; + } + + private java.util.List attr_ = + java.util.Collections.emptyList(); + private void ensureAttrIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + attr_ = new java.util.ArrayList(attr_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.AttrDef, org.tensorflow.framework.OpDef.AttrDef.Builder, org.tensorflow.framework.OpDef.AttrDefOrBuilder> attrBuilder_; + + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public java.util.List getAttrList() { + if (attrBuilder_ == null) { + return java.util.Collections.unmodifiableList(attr_); + } else { + return attrBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public int getAttrCount() { + if (attrBuilder_ == null) { + return attr_.size(); + } else { + return attrBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDef getAttr(int index) { + if (attrBuilder_ == null) { + return attr_.get(index); + } else { + return attrBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + 
public Builder setAttr( + int index, org.tensorflow.framework.OpDef.AttrDef value) { + if (attrBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttrIsMutable(); + attr_.set(index, value); + onChanged(); + } else { + attrBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder setAttr( + int index, org.tensorflow.framework.OpDef.AttrDef.Builder builderForValue) { + if (attrBuilder_ == null) { + ensureAttrIsMutable(); + attr_.set(index, builderForValue.build()); + onChanged(); + } else { + attrBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder addAttr(org.tensorflow.framework.OpDef.AttrDef value) { + if (attrBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttrIsMutable(); + attr_.add(value); + onChanged(); + } else { + attrBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder addAttr( + int index, org.tensorflow.framework.OpDef.AttrDef value) { + if (attrBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttrIsMutable(); + attr_.add(index, value); + onChanged(); + } else { + attrBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder addAttr( + org.tensorflow.framework.OpDef.AttrDef.Builder builderForValue) { + if (attrBuilder_ == null) { + ensureAttrIsMutable(); + attr_.add(builderForValue.build()); + onChanged(); + } else { + attrBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder addAttr( + int index, org.tensorflow.framework.OpDef.AttrDef.Builder builderForValue) { + if (attrBuilder_ == null) { + ensureAttrIsMutable(); + 
attr_.add(index, builderForValue.build()); + onChanged(); + } else { + attrBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder addAllAttr( + java.lang.Iterable values) { + if (attrBuilder_ == null) { + ensureAttrIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, attr_); + onChanged(); + } else { + attrBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder clearAttr() { + if (attrBuilder_ == null) { + attr_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + attrBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public Builder removeAttr(int index) { + if (attrBuilder_ == null) { + ensureAttrIsMutable(); + attr_.remove(index); + onChanged(); + } else { + attrBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDef.Builder getAttrBuilder( + int index) { + return getAttrFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDefOrBuilder getAttrOrBuilder( + int index) { + if (attrBuilder_ == null) { + return attr_.get(index); } else { + return attrBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public java.util.List + getAttrOrBuilderList() { + if (attrBuilder_ != null) { + return attrBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attr_); + } + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDef.Builder addAttrBuilder() { + return getAttrFieldBuilder().addBuilder( + 
org.tensorflow.framework.OpDef.AttrDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public org.tensorflow.framework.OpDef.AttrDef.Builder addAttrBuilder( + int index) { + return getAttrFieldBuilder().addBuilder( + index, org.tensorflow.framework.OpDef.AttrDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + public java.util.List + getAttrBuilderList() { + return getAttrFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.AttrDef, org.tensorflow.framework.OpDef.AttrDef.Builder, org.tensorflow.framework.OpDef.AttrDefOrBuilder> + getAttrFieldBuilder() { + if (attrBuilder_ == null) { + attrBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef.AttrDef, org.tensorflow.framework.OpDef.AttrDef.Builder, org.tensorflow.framework.OpDef.AttrDefOrBuilder>( + attr_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + attr_ = null; + } + return attrBuilder_; + } + + private org.tensorflow.framework.OpDeprecation deprecation_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OpDeprecation, org.tensorflow.framework.OpDeprecation.Builder, org.tensorflow.framework.OpDeprecationOrBuilder> deprecationBuilder_; + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public boolean hasDeprecation() { + return deprecationBuilder_ != null || deprecation_ != null; + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public org.tensorflow.framework.OpDeprecation getDeprecation() { + if (deprecationBuilder_ == null) { + return deprecation_ == null ? org.tensorflow.framework.OpDeprecation.getDefaultInstance() : deprecation_; + } else { + return deprecationBuilder_.getMessage(); + } + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public Builder setDeprecation(org.tensorflow.framework.OpDeprecation value) { + if (deprecationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + deprecation_ = value; + onChanged(); + } else { + deprecationBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public Builder setDeprecation( + org.tensorflow.framework.OpDeprecation.Builder builderForValue) { + if (deprecationBuilder_ == null) { + deprecation_ = builderForValue.build(); + onChanged(); + } else { + deprecationBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public Builder mergeDeprecation(org.tensorflow.framework.OpDeprecation value) { + if (deprecationBuilder_ == null) { + if (deprecation_ != null) { + deprecation_ = + org.tensorflow.framework.OpDeprecation.newBuilder(deprecation_).mergeFrom(value).buildPartial(); + } else { + deprecation_ = value; + } + onChanged(); + } else { + deprecationBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public Builder clearDeprecation() { + if (deprecationBuilder_ == null) { + deprecation_ = null; + onChanged(); + } else { + deprecation_ = null; + deprecationBuilder_ = null; + } + + return this; + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public org.tensorflow.framework.OpDeprecation.Builder getDeprecationBuilder() { + + onChanged(); + return getDeprecationFieldBuilder().getBuilder(); + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + public org.tensorflow.framework.OpDeprecationOrBuilder getDeprecationOrBuilder() { + if (deprecationBuilder_ != null) { + return deprecationBuilder_.getMessageOrBuilder(); + } else { + return deprecation_ == null ? + org.tensorflow.framework.OpDeprecation.getDefaultInstance() : deprecation_; + } + } + /** + *
+     * Optional deprecation based on GraphDef versions.
+     * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OpDeprecation, org.tensorflow.framework.OpDeprecation.Builder, org.tensorflow.framework.OpDeprecationOrBuilder> + getDeprecationFieldBuilder() { + if (deprecationBuilder_ == null) { + deprecationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.OpDeprecation, org.tensorflow.framework.OpDeprecation.Builder, org.tensorflow.framework.OpDeprecationOrBuilder>( + getDeprecation(), + getParentForChildren(), + isClean()); + deprecation_ = null; + } + return deprecationBuilder_; + } + + private java.lang.Object summary_ = ""; + /** + *
+     * One-line human-readable description of what the Op does.
+     * 
+ * + * optional string summary = 5; + */ + public java.lang.String getSummary() { + java.lang.Object ref = summary_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + summary_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * One-line human-readable description of what the Op does.
+     * 
+ * + * optional string summary = 5; + */ + public com.google.protobuf.ByteString + getSummaryBytes() { + java.lang.Object ref = summary_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + summary_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * One-line human-readable description of what the Op does.
+     * 
+ * + * optional string summary = 5; + */ + public Builder setSummary( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + summary_ = value; + onChanged(); + return this; + } + /** + *
+     * One-line human-readable description of what the Op does.
+     * 
+ * + * optional string summary = 5; + */ + public Builder clearSummary() { + + summary_ = getDefaultInstance().getSummary(); + onChanged(); + return this; + } + /** + *
+     * One-line human-readable description of what the Op does.
+     * 
+ * + * optional string summary = 5; + */ + public Builder setSummaryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + summary_ = value; + onChanged(); + return this; + } + + private java.lang.Object description_ = ""; + /** + *
+     * Additional, longer human-readable description of what the Op does.
+     * 
+ * + * optional string description = 6; + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Additional, longer human-readable description of what the Op does.
+     * 
+ * + * optional string description = 6; + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Additional, longer human-readable description of what the Op does.
+     * 
+ * + * optional string description = 6; + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + description_ = value; + onChanged(); + return this; + } + /** + *
+     * Additional, longer human-readable description of what the Op does.
+     * 
+ * + * optional string description = 6; + */ + public Builder clearDescription() { + + description_ = getDefaultInstance().getDescription(); + onChanged(); + return this; + } + /** + *
+     * Additional, longer human-readable description of what the Op does.
+     * 
+ * + * optional string description = 6; + */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + description_ = value; + onChanged(); + return this; + } + + private boolean isCommutative_ ; + /** + *
+     * True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
+     * 
+ * + * optional bool is_commutative = 18; + */ + public boolean getIsCommutative() { + return isCommutative_; + } + /** + *
+     * True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
+     * 
+ * + * optional bool is_commutative = 18; + */ + public Builder setIsCommutative(boolean value) { + + isCommutative_ = value; + onChanged(); + return this; + } + /** + *
+     * True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
+     * 
+ * + * optional bool is_commutative = 18; + */ + public Builder clearIsCommutative() { + + isCommutative_ = false; + onChanged(); + return this; + } + + private boolean isAggregate_ ; + /** + *
+     * If is_aggregate is true, then this operation accepts N >= 2
+     * inputs and produces 1 output all of the same type.  Should be
+     * associative and commutative, and produce output with the same
+     * shape as the input.  The optimizer may replace an aggregate op
+     * taking input from multiple devices with a tree of aggregate ops
+     * that aggregate locally within each device (and possibly within
+     * groups of nearby devices) before communicating.
+     * TODO(josh11b): Implement that optimization.
+     * 
+ * + * optional bool is_aggregate = 16; + */ + public boolean getIsAggregate() { + return isAggregate_; + } + /** + *
+     * If is_aggregate is true, then this operation accepts N >= 2
+     * inputs and produces 1 output all of the same type.  Should be
+     * associative and commutative, and produce output with the same
+     * shape as the input.  The optimizer may replace an aggregate op
+     * taking input from multiple devices with a tree of aggregate ops
+     * that aggregate locally within each device (and possibly within
+     * groups of nearby devices) before communicating.
+     * TODO(josh11b): Implement that optimization.
+     * 
+ * + * optional bool is_aggregate = 16; + */ + public Builder setIsAggregate(boolean value) { + + isAggregate_ = value; + onChanged(); + return this; + } + /** + *
+     * If is_aggregate is true, then this operation accepts N >= 2
+     * inputs and produces 1 output all of the same type.  Should be
+     * associative and commutative, and produce output with the same
+     * shape as the input.  The optimizer may replace an aggregate op
+     * taking input from multiple devices with a tree of aggregate ops
+     * that aggregate locally within each device (and possibly within
+     * groups of nearby devices) before communicating.
+     * TODO(josh11b): Implement that optimization.
+     * 
+ * + * optional bool is_aggregate = 16; + */ + public Builder clearIsAggregate() { + + isAggregate_ = false; + onChanged(); + return this; + } + + private boolean isStateful_ ; + /** + *
+     * By default Ops may be moved between devices.  Stateful ops should
+     * either not be moved, or should only be moved if that state can also
+     * be moved (e.g. via some sort of save / restore).
+     * Stateful ops are guaranteed to never be optimized away by Common
+     * Subexpression Elimination (CSE).
+     * 
+ * + * optional bool is_stateful = 17; + */ + public boolean getIsStateful() { + return isStateful_; + } + /** + *
+     * By default Ops may be moved between devices.  Stateful ops should
+     * either not be moved, or should only be moved if that state can also
+     * be moved (e.g. via some sort of save / restore).
+     * Stateful ops are guaranteed to never be optimized away by Common
+     * Subexpression Elimination (CSE).
+     * 
+ * + * optional bool is_stateful = 17; + */ + public Builder setIsStateful(boolean value) { + + isStateful_ = value; + onChanged(); + return this; + } + /** + *
+     * By default Ops may be moved between devices.  Stateful ops should
+     * either not be moved, or should only be moved if that state can also
+     * be moved (e.g. via some sort of save / restore).
+     * Stateful ops are guaranteed to never be optimized away by Common
+     * Subexpression Elimination (CSE).
+     * 
+ * + * optional bool is_stateful = 17; + */ + public Builder clearIsStateful() { + + isStateful_ = false; + onChanged(); + return this; + } + + private boolean allowsUninitializedInput_ ; + /** + *
+     * By default, all inputs to an Op must be initialized Tensors.  Ops
+     * that may initialize tensors for the first time should set this
+     * field to true, to allow the Op to take an uninitialized Tensor as
+     * input.
+     * 
+ * + * optional bool allows_uninitialized_input = 19; + */ + public boolean getAllowsUninitializedInput() { + return allowsUninitializedInput_; + } + /** + *
+     * By default, all inputs to an Op must be initialized Tensors.  Ops
+     * that may initialize tensors for the first time should set this
+     * field to true, to allow the Op to take an uninitialized Tensor as
+     * input.
+     * 
+ * + * optional bool allows_uninitialized_input = 19; + */ + public Builder setAllowsUninitializedInput(boolean value) { + + allowsUninitializedInput_ = value; + onChanged(); + return this; + } + /** + *
+     * By default, all inputs to an Op must be initialized Tensors.  Ops
+     * that may initialize tensors for the first time should set this
+     * field to true, to allow the Op to take an uninitialized Tensor as
+     * input.
+     * 
+ * + * optional bool allows_uninitialized_input = 19; + */ + public Builder clearAllowsUninitializedInput() { + + allowsUninitializedInput_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.OpDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.OpDef) + private static final org.tensorflow.framework.OpDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.OpDef(); + } + + public static org.tensorflow.framework.OpDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OpDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OpDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.OpDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefOrBuilder.java new file mode 100644 index 0000000000000..31e683ec30e81 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefOrBuilder.java @@ -0,0 +1,252 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +public interface OpDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.OpDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Op names starting with an underscore are reserved for internal use.
+   * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+   * 
+ * + * optional string name = 1; + */ + java.lang.String getName(); + /** + *
+   * Op names starting with an underscore are reserved for internal use.
+   * Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9_]*".
+   * 
+ * + * optional string name = 1; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + java.util.List + getInputArgList(); + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + org.tensorflow.framework.OpDef.ArgDef getInputArg(int index); + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + int getInputArgCount(); + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + java.util.List + getInputArgOrBuilderList(); + /** + *
+   * Description of the input(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef input_arg = 2; + */ + org.tensorflow.framework.OpDef.ArgDefOrBuilder getInputArgOrBuilder( + int index); + + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + java.util.List + getOutputArgList(); + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + org.tensorflow.framework.OpDef.ArgDef getOutputArg(int index); + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + int getOutputArgCount(); + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + java.util.List + getOutputArgOrBuilderList(); + /** + *
+   * Description of the output(s).
+   * 
+ * + * repeated .tensorflow.OpDef.ArgDef output_arg = 3; + */ + org.tensorflow.framework.OpDef.ArgDefOrBuilder getOutputArgOrBuilder( + int index); + + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + java.util.List + getAttrList(); + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + org.tensorflow.framework.OpDef.AttrDef getAttr(int index); + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + int getAttrCount(); + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + java.util.List + getAttrOrBuilderList(); + /** + * repeated .tensorflow.OpDef.AttrDef attr = 4; + */ + org.tensorflow.framework.OpDef.AttrDefOrBuilder getAttrOrBuilder( + int index); + + /** + *
+   * Optional deprecation based on GraphDef versions.
+   * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + boolean hasDeprecation(); + /** + *
+   * Optional deprecation based on GraphDef versions.
+   * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + org.tensorflow.framework.OpDeprecation getDeprecation(); + /** + *
+   * Optional deprecation based on GraphDef versions.
+   * 
+ * + * optional .tensorflow.OpDeprecation deprecation = 8; + */ + org.tensorflow.framework.OpDeprecationOrBuilder getDeprecationOrBuilder(); + + /** + *
+   * One-line human-readable description of what the Op does.
+   * 
+ * + * optional string summary = 5; + */ + java.lang.String getSummary(); + /** + *
+   * One-line human-readable description of what the Op does.
+   * 
+ * + * optional string summary = 5; + */ + com.google.protobuf.ByteString + getSummaryBytes(); + + /** + *
+   * Additional, longer human-readable description of what the Op does.
+   * 
+ * + * optional string description = 6; + */ + java.lang.String getDescription(); + /** + *
+   * Additional, longer human-readable description of what the Op does.
+   * 
+ * + * optional string description = 6; + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + + /** + *
+   * True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
+   * 
+ * + * optional bool is_commutative = 18; + */ + boolean getIsCommutative(); + + /** + *
+   * If is_aggregate is true, then this operation accepts N >= 2
+   * inputs and produces 1 output all of the same type.  Should be
+   * associative and commutative, and produce output with the same
+   * shape as the input.  The optimizer may replace an aggregate op
+   * taking input from multiple devices with a tree of aggregate ops
+   * that aggregate locally within each device (and possibly within
+   * groups of nearby devices) before communicating.
+   * TODO(josh11b): Implement that optimization.
+   * 
+ * + * optional bool is_aggregate = 16; + */ + boolean getIsAggregate(); + + /** + *
+   * By default Ops may be moved between devices.  Stateful ops should
+   * either not be moved, or should only be moved if that state can also
+   * be moved (e.g. via some sort of save / restore).
+   * Stateful ops are guaranteed to never be optimized away by Common
+   * Subexpression Elimination (CSE).
+   * 
+ * + * optional bool is_stateful = 17; + */ + boolean getIsStateful(); + + /** + *
+   * By default, all inputs to an Op must be initialized Tensors.  Ops
+   * that may initialize tensors for the first time should set this
+   * field to true, to allow the Op to take an uninitialized Tensor as
+   * input.
+   * 
+ * + * optional bool allows_uninitialized_input = 19; + */ + boolean getAllowsUninitializedInput(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefProtos.java new file mode 100644 index 0000000000000..b1e695513d1c3 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDefProtos.java @@ -0,0 +1,126 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +public final class OpDefProtos { + private OpDefProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_OpDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_OpDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_OpDef_ArgDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_OpDef_ArgDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_OpDef_AttrDef_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_OpDef_AttrDef_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_OpDeprecation_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_tensorflow_OpDeprecation_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_OpList_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_OpList_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n&tensorflow/core/framework/op_def.proto" + + "\022\ntensorflow\032*tensorflow/core/framework/" + + "attr_value.proto\032%tensorflow/core/framew" + + "ork/types.proto\"\270\005\n\005OpDef\022\014\n\004name\030\001 \001(\t\022" + + "+\n\tinput_arg\030\002 \003(\0132\030.tensorflow.OpDef.Ar" + + "gDef\022,\n\noutput_arg\030\003 \003(\0132\030.tensorflow.Op" + + "Def.ArgDef\022\'\n\004attr\030\004 \003(\0132\031.tensorflow.Op" + + "Def.AttrDef\022.\n\013deprecation\030\010 \001(\0132\031.tenso" + + "rflow.OpDeprecation\022\017\n\007summary\030\005 \001(\t\022\023\n\013" + + "description\030\006 \001(\t\022\026\n\016is_commutative\030\022 \001(", + "\010\022\024\n\014is_aggregate\030\020 \001(\010\022\023\n\013is_stateful\030\021" + + " \001(\010\022\"\n\032allows_uninitialized_input\030\023 \001(\010" + + "\032\237\001\n\006ArgDef\022\014\n\004name\030\001 \001(\t\022\023\n\013description" + + "\030\002 \001(\t\022\"\n\004type\030\003 \001(\0162\024.tensorflow.DataTy" + + "pe\022\021\n\ttype_attr\030\004 \001(\t\022\023\n\013number_attr\030\005 \001" + + "(\t\022\026\n\016type_list_attr\030\006 \001(\t\022\016\n\006is_ref\030\020 \001" + + "(\010\032\275\001\n\007AttrDef\022\014\n\004name\030\001 \001(\t\022\014\n\004type\030\002 \001" + + "(\t\022,\n\rdefault_value\030\003 \001(\0132\025.tensorflow.A" + + "ttrValue\022\023\n\013description\030\004 \001(\t\022\023\n\013has_min" + + "imum\030\005 \001(\010\022\017\n\007minimum\030\006 
\001(\003\022-\n\016allowed_v", + "alues\030\007 \001(\0132\025.tensorflow.AttrValue\"5\n\rOp" + + "Deprecation\022\017\n\007version\030\001 \001(\005\022\023\n\013explanat" + + "ion\030\002 \001(\t\"\'\n\006OpList\022\035\n\002op\030\001 \003(\0132\021.tensor" + + "flow.OpDefB,\n\030org.tensorflow.frameworkB\013" + + "OpDefProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.AttrValueProtos.getDescriptor(), + org.tensorflow.framework.TypesProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_OpDef_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_OpDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_OpDef_descriptor, + new java.lang.String[] { "Name", "InputArg", "OutputArg", "Attr", "Deprecation", "Summary", "Description", "IsCommutative", "IsAggregate", "IsStateful", "AllowsUninitializedInput", }); + internal_static_tensorflow_OpDef_ArgDef_descriptor = + internal_static_tensorflow_OpDef_descriptor.getNestedTypes().get(0); + internal_static_tensorflow_OpDef_ArgDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_OpDef_ArgDef_descriptor, + new java.lang.String[] { "Name", "Description", "Type", "TypeAttr", "NumberAttr", "TypeListAttr", "IsRef", }); + internal_static_tensorflow_OpDef_AttrDef_descriptor = + internal_static_tensorflow_OpDef_descriptor.getNestedTypes().get(1); + 
internal_static_tensorflow_OpDef_AttrDef_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_OpDef_AttrDef_descriptor, + new java.lang.String[] { "Name", "Type", "DefaultValue", "Description", "HasMinimum", "Minimum", "AllowedValues", }); + internal_static_tensorflow_OpDeprecation_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_OpDeprecation_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_OpDeprecation_descriptor, + new java.lang.String[] { "Version", "Explanation", }); + internal_static_tensorflow_OpList_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_tensorflow_OpList_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_OpList_descriptor, + new java.lang.String[] { "Op", }); + org.tensorflow.framework.AttrValueProtos.getDescriptor(); + org.tensorflow.framework.TypesProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecation.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecation.java new file mode 100644 index 0000000000000..513d2e123fd06 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecation.java @@ -0,0 +1,604 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +/** + *
+ * Information about version-dependent deprecation of an op
+ * 
+ * + * Protobuf type {@code tensorflow.OpDeprecation} + */ +public final class OpDeprecation extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.OpDeprecation) + OpDeprecationOrBuilder { + // Use OpDeprecation.newBuilder() to construct. + private OpDeprecation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OpDeprecation() { + version_ = 0; + explanation_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private OpDeprecation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + version_ = input.readInt32(); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + explanation_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDeprecation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDeprecation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.tensorflow.framework.OpDeprecation.class, org.tensorflow.framework.OpDeprecation.Builder.class); + } + + public static final int VERSION_FIELD_NUMBER = 1; + private int version_; + /** + *
+   * First GraphDef version at which the op is disallowed.
+   * 
+ * + * optional int32 version = 1; + */ + public int getVersion() { + return version_; + } + + public static final int EXPLANATION_FIELD_NUMBER = 2; + private volatile java.lang.Object explanation_; + /** + *
+   * Explanation of why it was deprecated and what to use instead.
+   * 
+ * + * optional string explanation = 2; + */ + public java.lang.String getExplanation() { + java.lang.Object ref = explanation_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + explanation_ = s; + return s; + } + } + /** + *
+   * Explanation of why it was deprecated and what to use instead.
+   * 
+ * + * optional string explanation = 2; + */ + public com.google.protobuf.ByteString + getExplanationBytes() { + java.lang.Object ref = explanation_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + explanation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (version_ != 0) { + output.writeInt32(1, version_); + } + if (!getExplanationBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, explanation_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (version_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, version_); + } + if (!getExplanationBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, explanation_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.OpDeprecation)) { + return super.equals(obj); + } + org.tensorflow.framework.OpDeprecation other = (org.tensorflow.framework.OpDeprecation) obj; + + boolean result = true; + result = result && (getVersion() + == other.getVersion()); + result = result && getExplanation() + .equals(other.getExplanation()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } 
+ int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion(); + hash = (37 * hash) + EXPLANATION_FIELD_NUMBER; + hash = (53 * hash) + getExplanation().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.OpDeprecation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDeprecation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDeprecation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpDeprecation parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpDeprecation parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDeprecation parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDeprecation parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDeprecation parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpDeprecation parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpDeprecation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.OpDeprecation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Information about version-dependent deprecation of an op
+   * 
+ * + * Protobuf type {@code tensorflow.OpDeprecation} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.OpDeprecation) + org.tensorflow.framework.OpDeprecationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDeprecation_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDeprecation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpDeprecation.class, org.tensorflow.framework.OpDeprecation.Builder.class); + } + + // Construct using org.tensorflow.framework.OpDeprecation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + version_ = 0; + + explanation_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpDeprecation_descriptor; + } + + public org.tensorflow.framework.OpDeprecation getDefaultInstanceForType() { + return org.tensorflow.framework.OpDeprecation.getDefaultInstance(); + } + + public org.tensorflow.framework.OpDeprecation build() { + org.tensorflow.framework.OpDeprecation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.OpDeprecation buildPartial() { + 
org.tensorflow.framework.OpDeprecation result = new org.tensorflow.framework.OpDeprecation(this); + result.version_ = version_; + result.explanation_ = explanation_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.OpDeprecation) { + return mergeFrom((org.tensorflow.framework.OpDeprecation)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.OpDeprecation other) { + if (other == org.tensorflow.framework.OpDeprecation.getDefaultInstance()) return this; + if (other.getVersion() != 0) { + setVersion(other.getVersion()); + } + if (!other.getExplanation().isEmpty()) { + explanation_ = other.explanation_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.OpDeprecation parsedMessage = null; + try { + parsedMessage 
= PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.OpDeprecation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int version_ ; + /** + *
+     * First GraphDef version at which the op is disallowed.
+     * 
+ * + * optional int32 version = 1; + */ + public int getVersion() { + return version_; + } + /** + *
+     * First GraphDef version at which the op is disallowed.
+     * 
+ * + * optional int32 version = 1; + */ + public Builder setVersion(int value) { + + version_ = value; + onChanged(); + return this; + } + /** + *
+     * First GraphDef version at which the op is disallowed.
+     * 
+ * + * optional int32 version = 1; + */ + public Builder clearVersion() { + + version_ = 0; + onChanged(); + return this; + } + + private java.lang.Object explanation_ = ""; + /** + *
+     * Explanation of why it was deprecated and what to use instead.
+     * 
+ * + * optional string explanation = 2; + */ + public java.lang.String getExplanation() { + java.lang.Object ref = explanation_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + explanation_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Explanation of why it was deprecated and what to use instead.
+     * 
+ * + * optional string explanation = 2; + */ + public com.google.protobuf.ByteString + getExplanationBytes() { + java.lang.Object ref = explanation_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + explanation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Explanation of why it was deprecated and what to use instead.
+     * 
+ * + * optional string explanation = 2; + */ + public Builder setExplanation( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + explanation_ = value; + onChanged(); + return this; + } + /** + *
+     * Explanation of why it was deprecated and what to use instead.
+     * 
+ * + * optional string explanation = 2; + */ + public Builder clearExplanation() { + + explanation_ = getDefaultInstance().getExplanation(); + onChanged(); + return this; + } + /** + *
+     * Explanation of why it was deprecated and what to use instead.
+     * 
+ * + * optional string explanation = 2; + */ + public Builder setExplanationBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + explanation_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.OpDeprecation) + } + + // @@protoc_insertion_point(class_scope:tensorflow.OpDeprecation) + private static final org.tensorflow.framework.OpDeprecation DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.OpDeprecation(); + } + + public static org.tensorflow.framework.OpDeprecation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OpDeprecation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OpDeprecation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.OpDeprecation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecationOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecationOrBuilder.java new file mode 100644 index 0000000000000..5dd2fbed898b3 --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpDeprecationOrBuilder.java @@ -0,0 +1,36 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +public interface OpDeprecationOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.OpDeprecation) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * First GraphDef version at which the op is disallowed.
+   * 
+ * + * optional int32 version = 1; + */ + int getVersion(); + + /** + *
+   * Explanation of why it was deprecated and what to use instead.
+   * 
+ * + * optional string explanation = 2; + */ + java.lang.String getExplanation(); + /** + *
+   * Explanation of why it was deprecated and what to use instead.
+   * 
+ * + * optional string explanation = 2; + */ + com.google.protobuf.ByteString + getExplanationBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpList.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpList.java new file mode 100644 index 0000000000000..2c85b4c905e5d --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpList.java @@ -0,0 +1,720 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +/** + *
+ * A collection of OpDefs
+ * 
+ * + * Protobuf type {@code tensorflow.OpList} + */ +public final class OpList extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.OpList) + OpListOrBuilder { + // Use OpList.newBuilder() to construct. + private OpList(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OpList() { + op_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private OpList( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + op_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + op_.add( + input.readMessage(org.tensorflow.framework.OpDef.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + op_ = java.util.Collections.unmodifiableList(op_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpList_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + 
return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpList.class, org.tensorflow.framework.OpList.Builder.class); + } + + public static final int OP_FIELD_NUMBER = 1; + private java.util.List op_; + /** + * repeated .tensorflow.OpDef op = 1; + */ + public java.util.List getOpList() { + return op_; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public java.util.List + getOpOrBuilderList() { + return op_; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public int getOpCount() { + return op_.size(); + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDef getOp(int index) { + return op_.get(index); + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDefOrBuilder getOpOrBuilder( + int index) { + return op_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < op_.size(); i++) { + output.writeMessage(1, op_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < op_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, op_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.OpList)) { + return super.equals(obj); + } + org.tensorflow.framework.OpList other = (org.tensorflow.framework.OpList) obj; 
+ + boolean result = true; + result = result && getOpList() + .equals(other.getOpList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getOpCount() > 0) { + hash = (37 * hash) + OP_FIELD_NUMBER; + hash = (53 * hash) + getOpList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.OpList parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpList parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpList parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OpList parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OpList parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpList parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpList 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpList parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OpList parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OpList parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.OpList prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * A collection of OpDefs
+   * 
+ * + * Protobuf type {@code tensorflow.OpList} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.OpList) + org.tensorflow.framework.OpListOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpList_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OpList.class, org.tensorflow.framework.OpList.Builder.class); + } + + // Construct using org.tensorflow.framework.OpList.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getOpFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (opBuilder_ == null) { + op_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + opBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.OpDefProtos.internal_static_tensorflow_OpList_descriptor; + } + + public org.tensorflow.framework.OpList getDefaultInstanceForType() { + return org.tensorflow.framework.OpList.getDefaultInstance(); + } + + public org.tensorflow.framework.OpList build() { + org.tensorflow.framework.OpList result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public 
org.tensorflow.framework.OpList buildPartial() { + org.tensorflow.framework.OpList result = new org.tensorflow.framework.OpList(this); + int from_bitField0_ = bitField0_; + if (opBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + op_ = java.util.Collections.unmodifiableList(op_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.op_ = op_; + } else { + result.op_ = opBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.OpList) { + return mergeFrom((org.tensorflow.framework.OpList)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.OpList other) { + if (other == org.tensorflow.framework.OpList.getDefaultInstance()) return this; + if (opBuilder_ == null) { + if (!other.op_.isEmpty()) { + if (op_.isEmpty()) { + op_ = other.op_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureOpIsMutable(); + op_.addAll(other.op_); + } + onChanged(); + } + } else { + if (!other.op_.isEmpty()) { + if (opBuilder_.isEmpty()) { + 
opBuilder_.dispose(); + opBuilder_ = null; + op_ = other.op_; + bitField0_ = (bitField0_ & ~0x00000001); + opBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getOpFieldBuilder() : null; + } else { + opBuilder_.addAllMessages(other.op_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.OpList parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.OpList) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List op_ = + java.util.Collections.emptyList(); + private void ensureOpIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + op_ = new java.util.ArrayList(op_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef, org.tensorflow.framework.OpDef.Builder, org.tensorflow.framework.OpDefOrBuilder> opBuilder_; + + /** + * repeated .tensorflow.OpDef op = 1; + */ + public java.util.List getOpList() { + if (opBuilder_ == null) { + return java.util.Collections.unmodifiableList(op_); + } else { + return opBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public int getOpCount() { + if (opBuilder_ == null) { + return op_.size(); + } else { + return opBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDef getOp(int index) { + if (opBuilder_ == null) { + return op_.get(index); + } else { + return 
opBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder setOp( + int index, org.tensorflow.framework.OpDef value) { + if (opBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpIsMutable(); + op_.set(index, value); + onChanged(); + } else { + opBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder setOp( + int index, org.tensorflow.framework.OpDef.Builder builderForValue) { + if (opBuilder_ == null) { + ensureOpIsMutable(); + op_.set(index, builderForValue.build()); + onChanged(); + } else { + opBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder addOp(org.tensorflow.framework.OpDef value) { + if (opBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpIsMutable(); + op_.add(value); + onChanged(); + } else { + opBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder addOp( + int index, org.tensorflow.framework.OpDef value) { + if (opBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpIsMutable(); + op_.add(index, value); + onChanged(); + } else { + opBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder addOp( + org.tensorflow.framework.OpDef.Builder builderForValue) { + if (opBuilder_ == null) { + ensureOpIsMutable(); + op_.add(builderForValue.build()); + onChanged(); + } else { + opBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder addOp( + int index, org.tensorflow.framework.OpDef.Builder builderForValue) { + if (opBuilder_ == null) { + ensureOpIsMutable(); + op_.add(index, builderForValue.build()); + onChanged(); + } else 
{ + opBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder addAllOp( + java.lang.Iterable values) { + if (opBuilder_ == null) { + ensureOpIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, op_); + onChanged(); + } else { + opBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder clearOp() { + if (opBuilder_ == null) { + op_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + opBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public Builder removeOp(int index) { + if (opBuilder_ == null) { + ensureOpIsMutable(); + op_.remove(index); + onChanged(); + } else { + opBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDef.Builder getOpBuilder( + int index) { + return getOpFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDefOrBuilder getOpOrBuilder( + int index) { + if (opBuilder_ == null) { + return op_.get(index); } else { + return opBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public java.util.List + getOpOrBuilderList() { + if (opBuilder_ != null) { + return opBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(op_); + } + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDef.Builder addOpBuilder() { + return getOpFieldBuilder().addBuilder( + org.tensorflow.framework.OpDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public org.tensorflow.framework.OpDef.Builder addOpBuilder( + int index) { + return getOpFieldBuilder().addBuilder( + index, 
org.tensorflow.framework.OpDef.getDefaultInstance()); + } + /** + * repeated .tensorflow.OpDef op = 1; + */ + public java.util.List + getOpBuilderList() { + return getOpFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef, org.tensorflow.framework.OpDef.Builder, org.tensorflow.framework.OpDefOrBuilder> + getOpFieldBuilder() { + if (opBuilder_ == null) { + opBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.OpDef, org.tensorflow.framework.OpDef.Builder, org.tensorflow.framework.OpDefOrBuilder>( + op_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + op_ = null; + } + return opBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.OpList) + } + + // @@protoc_insertion_point(class_scope:tensorflow.OpList) + private static final org.tensorflow.framework.OpList DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.OpList(); + } + + public static org.tensorflow.framework.OpList getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OpList parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OpList(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.OpList getDefaultInstanceForType() { + 
return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpListOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpListOrBuilder.java new file mode 100644 index 0000000000000..ff43744451cf3 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OpListOrBuilder.java @@ -0,0 +1,33 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/op_def.proto + +package org.tensorflow.framework; + +public interface OpListOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.OpList) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .tensorflow.OpDef op = 1; + */ + java.util.List + getOpList(); + /** + * repeated .tensorflow.OpDef op = 1; + */ + org.tensorflow.framework.OpDef getOp(int index); + /** + * repeated .tensorflow.OpDef op = 1; + */ + int getOpCount(); + /** + * repeated .tensorflow.OpDef op = 1; + */ + java.util.List + getOpOrBuilderList(); + /** + * repeated .tensorflow.OpDef op = 1; + */ + org.tensorflow.framework.OpDefOrBuilder getOpOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptions.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptions.java new file mode 100644 index 0000000000000..2af62c8d7221a --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptions.java @@ -0,0 +1,1032 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + *
+ * Options passed to the graph optimizer
+ * 
+ * + * Protobuf type {@code tensorflow.OptimizerOptions} + */ +public final class OptimizerOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.OptimizerOptions) + OptimizerOptionsOrBuilder { + // Use OptimizerOptions.newBuilder() to construct. + private OptimizerOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OptimizerOptions() { + doCommonSubexpressionElimination_ = false; + doConstantFolding_ = false; + doFunctionInlining_ = false; + optLevel_ = 0; + globalJitLevel_ = 0; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private OptimizerOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + doCommonSubexpressionElimination_ = input.readBool(); + break; + } + case 16: { + + doConstantFolding_ = input.readBool(); + break; + } + case 24: { + int rawValue = input.readEnum(); + + optLevel_ = rawValue; + break; + } + case 32: { + + doFunctionInlining_ = input.readBool(); + break; + } + case 40: { + int rawValue = input.readEnum(); + + globalJitLevel_ = rawValue; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_OptimizerOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_OptimizerOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OptimizerOptions.class, org.tensorflow.framework.OptimizerOptions.Builder.class); + } + + /** + *
+   * Optimization level
+   * 
+ * + * Protobuf enum {@code tensorflow.OptimizerOptions.Level} + */ + public enum Level + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * L1 is the default level.
+     * Optimization performed at L1 :
+     * 1. Common subexpression elimination
+     * 2. Constant folding
+     * 
+ * + * L1 = 0; + */ + L1(0), + /** + *
+     * No optimizations
+     * 
+ * + * L0 = -1; + */ + L0(-1), + UNRECOGNIZED(-1), + ; + + /** + *
+     * L1 is the default level.
+     * Optimization performed at L1 :
+     * 1. Common subexpression elimination
+     * 2. Constant folding
+     * 
+ * + * L1 = 0; + */ + public static final int L1_VALUE = 0; + /** + *
+     * No optimizations
+     * 
+ * + * L0 = -1; + */ + public static final int L0_VALUE = -1; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Level valueOf(int value) { + return forNumber(value); + } + + public static Level forNumber(int value) { + switch (value) { + case 0: return L1; + case -1: return L0; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Level> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Level findValueByNumber(int number) { + return Level.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.tensorflow.framework.OptimizerOptions.getDescriptor().getEnumTypes().get(0); + } + + private static final Level[] VALUES = values(); + + public static Level valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Level(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:tensorflow.OptimizerOptions.Level) + } + + /** + *
+   * Control the use of the compiler/jit.  Experimental.
+   * 
+ * + * Protobuf enum {@code tensorflow.OptimizerOptions.GlobalJitLevel} + */ + public enum GlobalJitLevel + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * Default setting ("off" now, but later expected to be "on")
+     * 
+ * + * DEFAULT = 0; + */ + DEFAULT(0), + /** + * OFF = -1; + */ + OFF(-1), + /** + *
+     * The following settings turn on compilation, with higher values being
+     * more aggressive.  Higher values may reduce opportunities for parallelism
+     * and may use more memory.  (At present, there is no distinction, but this
+     * is expected to change.)
+     * 
+ * + * ON_1 = 1; + */ + ON_1(1), + /** + * ON_2 = 2; + */ + ON_2(2), + UNRECOGNIZED(-1), + ; + + /** + *
+     * Default setting ("off" now, but later expected to be "on")
+     * 
+ * + * DEFAULT = 0; + */ + public static final int DEFAULT_VALUE = 0; + /** + * OFF = -1; + */ + public static final int OFF_VALUE = -1; + /** + *
+     * The following settings turn on compilation, with higher values being
+     * more aggressive.  Higher values may reduce opportunities for parallelism
+     * and may use more memory.  (At present, there is no distinction, but this
+     * is expected to change.)
+     * 
+ * + * ON_1 = 1; + */ + public static final int ON_1_VALUE = 1; + /** + * ON_2 = 2; + */ + public static final int ON_2_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static GlobalJitLevel valueOf(int value) { + return forNumber(value); + } + + public static GlobalJitLevel forNumber(int value) { + switch (value) { + case 0: return DEFAULT; + case -1: return OFF; + case 1: return ON_1; + case 2: return ON_2; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + GlobalJitLevel> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public GlobalJitLevel findValueByNumber(int number) { + return GlobalJitLevel.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.tensorflow.framework.OptimizerOptions.getDescriptor().getEnumTypes().get(1); + } + + private static final GlobalJitLevel[] VALUES = values(); + + public static GlobalJitLevel valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private GlobalJitLevel(int value) 
{ + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:tensorflow.OptimizerOptions.GlobalJitLevel) + } + + public static final int DO_COMMON_SUBEXPRESSION_ELIMINATION_FIELD_NUMBER = 1; + private boolean doCommonSubexpressionElimination_; + /** + *
+   * If true, optimize the graph using common subexpression elimination.
+   * 
+ * + * optional bool do_common_subexpression_elimination = 1; + */ + public boolean getDoCommonSubexpressionElimination() { + return doCommonSubexpressionElimination_; + } + + public static final int DO_CONSTANT_FOLDING_FIELD_NUMBER = 2; + private boolean doConstantFolding_; + /** + *
+   * If true, perform constant folding optimization on the graph.
+   * 
+ * + * optional bool do_constant_folding = 2; + */ + public boolean getDoConstantFolding() { + return doConstantFolding_; + } + + public static final int DO_FUNCTION_INLINING_FIELD_NUMBER = 4; + private boolean doFunctionInlining_; + /** + *
+   * If true, perform function inlining on the graph.
+   * 
+ * + * optional bool do_function_inlining = 4; + */ + public boolean getDoFunctionInlining() { + return doFunctionInlining_; + } + + public static final int OPT_LEVEL_FIELD_NUMBER = 3; + private int optLevel_; + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public int getOptLevelValue() { + return optLevel_; + } + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public org.tensorflow.framework.OptimizerOptions.Level getOptLevel() { + org.tensorflow.framework.OptimizerOptions.Level result = org.tensorflow.framework.OptimizerOptions.Level.valueOf(optLevel_); + return result == null ? org.tensorflow.framework.OptimizerOptions.Level.UNRECOGNIZED : result; + } + + public static final int GLOBAL_JIT_LEVEL_FIELD_NUMBER = 5; + private int globalJitLevel_; + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public int getGlobalJitLevelValue() { + return globalJitLevel_; + } + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public org.tensorflow.framework.OptimizerOptions.GlobalJitLevel getGlobalJitLevel() { + org.tensorflow.framework.OptimizerOptions.GlobalJitLevel result = org.tensorflow.framework.OptimizerOptions.GlobalJitLevel.valueOf(globalJitLevel_); + return result == null ? 
org.tensorflow.framework.OptimizerOptions.GlobalJitLevel.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (doCommonSubexpressionElimination_ != false) { + output.writeBool(1, doCommonSubexpressionElimination_); + } + if (doConstantFolding_ != false) { + output.writeBool(2, doConstantFolding_); + } + if (optLevel_ != org.tensorflow.framework.OptimizerOptions.Level.L1.getNumber()) { + output.writeEnum(3, optLevel_); + } + if (doFunctionInlining_ != false) { + output.writeBool(4, doFunctionInlining_); + } + if (globalJitLevel_ != org.tensorflow.framework.OptimizerOptions.GlobalJitLevel.DEFAULT.getNumber()) { + output.writeEnum(5, globalJitLevel_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (doCommonSubexpressionElimination_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, doCommonSubexpressionElimination_); + } + if (doConstantFolding_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, doConstantFolding_); + } + if (optLevel_ != org.tensorflow.framework.OptimizerOptions.Level.L1.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(3, optLevel_); + } + if (doFunctionInlining_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, doFunctionInlining_); + } + if (globalJitLevel_ != org.tensorflow.framework.OptimizerOptions.GlobalJitLevel.DEFAULT.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, globalJitLevel_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID 
= 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.OptimizerOptions)) { + return super.equals(obj); + } + org.tensorflow.framework.OptimizerOptions other = (org.tensorflow.framework.OptimizerOptions) obj; + + boolean result = true; + result = result && (getDoCommonSubexpressionElimination() + == other.getDoCommonSubexpressionElimination()); + result = result && (getDoConstantFolding() + == other.getDoConstantFolding()); + result = result && (getDoFunctionInlining() + == other.getDoFunctionInlining()); + result = result && optLevel_ == other.optLevel_; + result = result && globalJitLevel_ == other.globalJitLevel_; + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + DO_COMMON_SUBEXPRESSION_ELIMINATION_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDoCommonSubexpressionElimination()); + hash = (37 * hash) + DO_CONSTANT_FOLDING_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDoConstantFolding()); + hash = (37 * hash) + DO_FUNCTION_INLINING_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getDoFunctionInlining()); + hash = (37 * hash) + OPT_LEVEL_FIELD_NUMBER; + hash = (53 * hash) + optLevel_; + hash = (37 * hash) + GLOBAL_JIT_LEVEL_FIELD_NUMBER; + hash = (53 * hash) + globalJitLevel_; + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.OptimizerOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OptimizerOptions parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OptimizerOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.OptimizerOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.OptimizerOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OptimizerOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OptimizerOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.OptimizerOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.OptimizerOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.tensorflow.framework.OptimizerOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.OptimizerOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Options passed to the graph optimizer
+   * 
+ * + * Protobuf type {@code tensorflow.OptimizerOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.OptimizerOptions) + org.tensorflow.framework.OptimizerOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_OptimizerOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_OptimizerOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.OptimizerOptions.class, org.tensorflow.framework.OptimizerOptions.Builder.class); + } + + // Construct using org.tensorflow.framework.OptimizerOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + doCommonSubexpressionElimination_ = false; + + doConstantFolding_ = false; + + doFunctionInlining_ = false; + + optLevel_ = 0; + + globalJitLevel_ = 0; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_OptimizerOptions_descriptor; + } + + public org.tensorflow.framework.OptimizerOptions getDefaultInstanceForType() { + return org.tensorflow.framework.OptimizerOptions.getDefaultInstance(); + } + + public org.tensorflow.framework.OptimizerOptions build() { + org.tensorflow.framework.OptimizerOptions result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.OptimizerOptions buildPartial() { + org.tensorflow.framework.OptimizerOptions result = new org.tensorflow.framework.OptimizerOptions(this); + result.doCommonSubexpressionElimination_ = doCommonSubexpressionElimination_; + result.doConstantFolding_ = doConstantFolding_; + result.doFunctionInlining_ = doFunctionInlining_; + result.optLevel_ = optLevel_; + result.globalJitLevel_ = globalJitLevel_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.OptimizerOptions) { + return mergeFrom((org.tensorflow.framework.OptimizerOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.OptimizerOptions other) { + if (other == org.tensorflow.framework.OptimizerOptions.getDefaultInstance()) return this; + if (other.getDoCommonSubexpressionElimination() != false) { + 
setDoCommonSubexpressionElimination(other.getDoCommonSubexpressionElimination()); + } + if (other.getDoConstantFolding() != false) { + setDoConstantFolding(other.getDoConstantFolding()); + } + if (other.getDoFunctionInlining() != false) { + setDoFunctionInlining(other.getDoFunctionInlining()); + } + if (other.optLevel_ != 0) { + setOptLevelValue(other.getOptLevelValue()); + } + if (other.globalJitLevel_ != 0) { + setGlobalJitLevelValue(other.getGlobalJitLevelValue()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.OptimizerOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.OptimizerOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private boolean doCommonSubexpressionElimination_ ; + /** + *
+     * If true, optimize the graph using common subexpression elimination.
+     * 
+ * + * optional bool do_common_subexpression_elimination = 1; + */ + public boolean getDoCommonSubexpressionElimination() { + return doCommonSubexpressionElimination_; + } + /** + *
+     * If true, optimize the graph using common subexpression elimination.
+     * 
+ * + * optional bool do_common_subexpression_elimination = 1; + */ + public Builder setDoCommonSubexpressionElimination(boolean value) { + + doCommonSubexpressionElimination_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, optimize the graph using common subexpression elimination.
+     * 
+ * + * optional bool do_common_subexpression_elimination = 1; + */ + public Builder clearDoCommonSubexpressionElimination() { + + doCommonSubexpressionElimination_ = false; + onChanged(); + return this; + } + + private boolean doConstantFolding_ ; + /** + *
+     * If true, perform constant folding optimization on the graph.
+     * 
+ * + * optional bool do_constant_folding = 2; + */ + public boolean getDoConstantFolding() { + return doConstantFolding_; + } + /** + *
+     * If true, perform constant folding optimization on the graph.
+     * 
+ * + * optional bool do_constant_folding = 2; + */ + public Builder setDoConstantFolding(boolean value) { + + doConstantFolding_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, perform constant folding optimization on the graph.
+     * 
+ * + * optional bool do_constant_folding = 2; + */ + public Builder clearDoConstantFolding() { + + doConstantFolding_ = false; + onChanged(); + return this; + } + + private boolean doFunctionInlining_ ; + /** + *
+     * If true, perform function inlining on the graph.
+     * 
+ * + * optional bool do_function_inlining = 4; + */ + public boolean getDoFunctionInlining() { + return doFunctionInlining_; + } + /** + *
+     * If true, perform function inlining on the graph.
+     * 
+ * + * optional bool do_function_inlining = 4; + */ + public Builder setDoFunctionInlining(boolean value) { + + doFunctionInlining_ = value; + onChanged(); + return this; + } + /** + *
+     * If true, perform function inlining on the graph.
+     * 
+ * + * optional bool do_function_inlining = 4; + */ + public Builder clearDoFunctionInlining() { + + doFunctionInlining_ = false; + onChanged(); + return this; + } + + private int optLevel_ = 0; + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public int getOptLevelValue() { + return optLevel_; + } + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public Builder setOptLevelValue(int value) { + optLevel_ = value; + onChanged(); + return this; + } + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public org.tensorflow.framework.OptimizerOptions.Level getOptLevel() { + org.tensorflow.framework.OptimizerOptions.Level result = org.tensorflow.framework.OptimizerOptions.Level.valueOf(optLevel_); + return result == null ? org.tensorflow.framework.OptimizerOptions.Level.UNRECOGNIZED : result; + } + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public Builder setOptLevel(org.tensorflow.framework.OptimizerOptions.Level value) { + if (value == null) { + throw new NullPointerException(); + } + + optLevel_ = value.getNumber(); + onChanged(); + return this; + } + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + public Builder clearOptLevel() { + + optLevel_ = 0; + onChanged(); + return this; + } + + private int globalJitLevel_ = 0; + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public int getGlobalJitLevelValue() { + return globalJitLevel_; + } + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public Builder setGlobalJitLevelValue(int value) { + globalJitLevel_ = value; + onChanged(); + return this; + } + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public org.tensorflow.framework.OptimizerOptions.GlobalJitLevel getGlobalJitLevel() { + org.tensorflow.framework.OptimizerOptions.GlobalJitLevel result = 
org.tensorflow.framework.OptimizerOptions.GlobalJitLevel.valueOf(globalJitLevel_); + return result == null ? org.tensorflow.framework.OptimizerOptions.GlobalJitLevel.UNRECOGNIZED : result; + } + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public Builder setGlobalJitLevel(org.tensorflow.framework.OptimizerOptions.GlobalJitLevel value) { + if (value == null) { + throw new NullPointerException(); + } + + globalJitLevel_ = value.getNumber(); + onChanged(); + return this; + } + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + public Builder clearGlobalJitLevel() { + + globalJitLevel_ = 0; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.OptimizerOptions) + } + + // @@protoc_insertion_point(class_scope:tensorflow.OptimizerOptions) + private static final org.tensorflow.framework.OptimizerOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.OptimizerOptions(); + } + + public static org.tensorflow.framework.OptimizerOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public OptimizerOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new OptimizerOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public 
org.tensorflow.framework.OptimizerOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptionsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptionsOrBuilder.java new file mode 100644 index 0000000000000..47007fedfb660 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/OptimizerOptionsOrBuilder.java @@ -0,0 +1,54 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface OptimizerOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.OptimizerOptions) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * If true, optimize the graph using common subexpression elimination.
+   * 
+ * + * optional bool do_common_subexpression_elimination = 1; + */ + boolean getDoCommonSubexpressionElimination(); + + /** + *
+   * If true, perform constant folding optimization on the graph.
+   * 
+ * + * optional bool do_constant_folding = 2; + */ + boolean getDoConstantFolding(); + + /** + *
+   * If true, perform function inlining on the graph.
+   * 
+ * + * optional bool do_function_inlining = 4; + */ + boolean getDoFunctionInlining(); + + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + int getOptLevelValue(); + /** + * optional .tensorflow.OptimizerOptions.Level opt_level = 3; + */ + org.tensorflow.framework.OptimizerOptions.Level getOptLevel(); + + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + int getGlobalJitLevelValue(); + /** + * optional .tensorflow.OptimizerOptions.GlobalJitLevel global_jit_level = 5; + */ + org.tensorflow.framework.OptimizerOptions.GlobalJitLevel getGlobalJitLevel(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandle.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandle.java new file mode 100644 index 0000000000000..03684fa0f3022 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandle.java @@ -0,0 +1,1085 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/resource_handle.proto + +package org.tensorflow.framework; + +/** + *
+ * Protocol buffer representing a handle to a tensorflow resource. Handles are
+ * not valid across executions, but can be serialized back and forth from within
+ * a single run.
+ * 
+ * + * Protobuf type {@code tensorflow.ResourceHandle} + */ +public final class ResourceHandle extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.ResourceHandle) + ResourceHandleOrBuilder { + // Use ResourceHandle.newBuilder() to construct. + private ResourceHandle(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ResourceHandle() { + device_ = ""; + container_ = ""; + name_ = ""; + hashCode_ = 0L; + maybeTypeName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private ResourceHandle( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + device_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + container_ = s; + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 32: { + + hashCode_ = input.readUInt64(); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + maybeTypeName_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.tensorflow.framework.ResourceHandleProto.internal_static_tensorflow_ResourceHandle_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ResourceHandleProto.internal_static_tensorflow_ResourceHandle_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.ResourceHandle.class, org.tensorflow.framework.ResourceHandle.Builder.class); + } + + public static final int DEVICE_FIELD_NUMBER = 1; + private volatile java.lang.Object device_; + /** + *
+   * Unique name for the device containing the resource.
+   * 
+ * + * optional string device = 1; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } + } + /** + *
+   * Unique name for the device containing the resource.
+   * 
+ * + * optional string device = 1; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CONTAINER_FIELD_NUMBER = 2; + private volatile java.lang.Object container_; + /** + *
+   * Container in which this resource is placed.
+   * 
+ * + * optional string container = 2; + */ + public java.lang.String getContainer() { + java.lang.Object ref = container_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + container_ = s; + return s; + } + } + /** + *
+   * Container in which this resource is placed.
+   * 
+ * + * optional string container = 2; + */ + public com.google.protobuf.ByteString + getContainerBytes() { + java.lang.Object ref = container_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + container_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NAME_FIELD_NUMBER = 3; + private volatile java.lang.Object name_; + /** + *
+   * Unique name of this resource.
+   * 
+ * + * optional string name = 3; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+   * Unique name of this resource.
+   * 
+ * + * optional string name = 3; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int HASH_CODE_FIELD_NUMBER = 4; + private long hashCode_; + /** + *
+   * Hash code for the type of the resource. Is only valid in the same device
+   * and in the same execution.
+   * 
+ * + * optional uint64 hash_code = 4; + */ + public long getHashCode() { + return hashCode_; + } + + public static final int MAYBE_TYPE_NAME_FIELD_NUMBER = 5; + private volatile java.lang.Object maybeTypeName_; + /** + *
+   * For debug-only, the name of the type pointed to by this handle, if
+   * available.
+   * 
+ * + * optional string maybe_type_name = 5; + */ + public java.lang.String getMaybeTypeName() { + java.lang.Object ref = maybeTypeName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + maybeTypeName_ = s; + return s; + } + } + /** + *
+   * For debug-only, the name of the type pointed to by this handle, if
+   * available.
+   * 
+ * + * optional string maybe_type_name = 5; + */ + public com.google.protobuf.ByteString + getMaybeTypeNameBytes() { + java.lang.Object ref = maybeTypeName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + maybeTypeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getDeviceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, device_); + } + if (!getContainerBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, container_); + } + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_); + } + if (hashCode_ != 0L) { + output.writeUInt64(4, hashCode_); + } + if (!getMaybeTypeNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, maybeTypeName_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getDeviceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, device_); + } + if (!getContainerBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, container_); + } + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_); + } + if (hashCode_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, hashCode_); + } + if (!getMaybeTypeNameBytes().isEmpty()) { + size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(5, maybeTypeName_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.ResourceHandle)) { + return super.equals(obj); + } + org.tensorflow.framework.ResourceHandle other = (org.tensorflow.framework.ResourceHandle) obj; + + boolean result = true; + result = result && getDevice() + .equals(other.getDevice()); + result = result && getContainer() + .equals(other.getContainer()); + result = result && getName() + .equals(other.getName()); + result = result && (getHashCode() + == other.getHashCode()); + result = result && getMaybeTypeName() + .equals(other.getMaybeTypeName()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + DEVICE_FIELD_NUMBER; + hash = (53 * hash) + getDevice().hashCode(); + hash = (37 * hash) + CONTAINER_FIELD_NUMBER; + hash = (53 * hash) + getContainer().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + HASH_CODE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getHashCode()); + hash = (37 * hash) + MAYBE_TYPE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getMaybeTypeName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.ResourceHandle parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.ResourceHandle parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.ResourceHandle parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.ResourceHandle parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.ResourceHandle parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.ResourceHandle parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.ResourceHandle parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.ResourceHandle parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.ResourceHandle parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.ResourceHandle parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.ResourceHandle prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * Protocol buffer representing a handle to a tensorflow resource. Handles are
+   * not valid across executions, but can be serialized back and forth from within
+   * a single run.
+   * 
+ * + * Protobuf type {@code tensorflow.ResourceHandle} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.ResourceHandle) + org.tensorflow.framework.ResourceHandleOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ResourceHandleProto.internal_static_tensorflow_ResourceHandle_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ResourceHandleProto.internal_static_tensorflow_ResourceHandle_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.ResourceHandle.class, org.tensorflow.framework.ResourceHandle.Builder.class); + } + + // Construct using org.tensorflow.framework.ResourceHandle.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + device_ = ""; + + container_ = ""; + + name_ = ""; + + hashCode_ = 0L; + + maybeTypeName_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ResourceHandleProto.internal_static_tensorflow_ResourceHandle_descriptor; + } + + public org.tensorflow.framework.ResourceHandle getDefaultInstanceForType() { + return org.tensorflow.framework.ResourceHandle.getDefaultInstance(); + } + + public org.tensorflow.framework.ResourceHandle build() { + org.tensorflow.framework.ResourceHandle result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + 
} + return result; + } + + public org.tensorflow.framework.ResourceHandle buildPartial() { + org.tensorflow.framework.ResourceHandle result = new org.tensorflow.framework.ResourceHandle(this); + result.device_ = device_; + result.container_ = container_; + result.name_ = name_; + result.hashCode_ = hashCode_; + result.maybeTypeName_ = maybeTypeName_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.ResourceHandle) { + return mergeFrom((org.tensorflow.framework.ResourceHandle)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.ResourceHandle other) { + if (other == org.tensorflow.framework.ResourceHandle.getDefaultInstance()) return this; + if (!other.getDevice().isEmpty()) { + device_ = other.device_; + onChanged(); + } + if (!other.getContainer().isEmpty()) { + container_ = other.container_; + onChanged(); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.getHashCode() != 0L) { + setHashCode(other.getHashCode()); + } + 
if (!other.getMaybeTypeName().isEmpty()) { + maybeTypeName_ = other.maybeTypeName_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.ResourceHandle parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.ResourceHandle) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object device_ = ""; + /** + *
+     * Unique name for the device containing the resource.
+     * 
+ * + * optional string device = 1; + */ + public java.lang.String getDevice() { + java.lang.Object ref = device_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + device_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Unique name for the device containing the resource.
+     * 
+ * + * optional string device = 1; + */ + public com.google.protobuf.ByteString + getDeviceBytes() { + java.lang.Object ref = device_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + device_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Unique name for the device containing the resource.
+     * 
+ * + * optional string device = 1; + */ + public Builder setDevice( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + device_ = value; + onChanged(); + return this; + } + /** + *
+     * Unique name for the device containing the resource.
+     * 
+ * + * optional string device = 1; + */ + public Builder clearDevice() { + + device_ = getDefaultInstance().getDevice(); + onChanged(); + return this; + } + /** + *
+     * Unique name for the device containing the resource.
+     * 
+ * + * optional string device = 1; + */ + public Builder setDeviceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + device_ = value; + onChanged(); + return this; + } + + private java.lang.Object container_ = ""; + /** + *
+     * Container in which this resource is placed.
+     * 
+ * + * optional string container = 2; + */ + public java.lang.String getContainer() { + java.lang.Object ref = container_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + container_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Container in which this resource is placed.
+     * 
+ * + * optional string container = 2; + */ + public com.google.protobuf.ByteString + getContainerBytes() { + java.lang.Object ref = container_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + container_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Container in which this resource is placed.
+     * 
+ * + * optional string container = 2; + */ + public Builder setContainer( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + container_ = value; + onChanged(); + return this; + } + /** + *
+     * Container in which this resource is placed.
+     * 
+ * + * optional string container = 2; + */ + public Builder clearContainer() { + + container_ = getDefaultInstance().getContainer(); + onChanged(); + return this; + } + /** + *
+     * Container in which this resource is placed.
+     * 
+ * + * optional string container = 2; + */ + public Builder setContainerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + container_ = value; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+     * Unique name of this resource.
+     * 
+ * + * optional string name = 3; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Unique name of this resource.
+     * 
+ * + * optional string name = 3; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Unique name of this resource.
+     * 
+ * + * optional string name = 3; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+     * Unique name of this resource.
+     * 
+ * + * optional string name = 3; + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+     * Unique name of this resource.
+     * 
+ * + * optional string name = 3; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private long hashCode_ ; + /** + *
+     * Hash code for the type of the resource. Is only valid in the same device
+     * and in the same execution.
+     * 
+ * + * optional uint64 hash_code = 4; + */ + public long getHashCode() { + return hashCode_; + } + /** + *
+     * Hash code for the type of the resource. Is only valid in the same device
+     * and in the same execution.
+     * 
+ * + * optional uint64 hash_code = 4; + */ + public Builder setHashCode(long value) { + + hashCode_ = value; + onChanged(); + return this; + } + /** + *
+     * Hash code for the type of the resource. Is only valid in the same device
+     * and in the same execution.
+     * 
+ * + * optional uint64 hash_code = 4; + */ + public Builder clearHashCode() { + + hashCode_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object maybeTypeName_ = ""; + /** + *
+     * For debug-only, the name of the type pointed to by this handle, if
+     * available.
+     * 
+ * + * optional string maybe_type_name = 5; + */ + public java.lang.String getMaybeTypeName() { + java.lang.Object ref = maybeTypeName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + maybeTypeName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * For debug-only, the name of the type pointed to by this handle, if
+     * available.
+     * 
+ * + * optional string maybe_type_name = 5; + */ + public com.google.protobuf.ByteString + getMaybeTypeNameBytes() { + java.lang.Object ref = maybeTypeName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + maybeTypeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * For debug-only, the name of the type pointed to by this handle, if
+     * available.
+     * 
+ * + * optional string maybe_type_name = 5; + */ + public Builder setMaybeTypeName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + maybeTypeName_ = value; + onChanged(); + return this; + } + /** + *
+     * For debug-only, the name of the type pointed to by this handle, if
+     * available.
+     * 
+ * + * optional string maybe_type_name = 5; + */ + public Builder clearMaybeTypeName() { + + maybeTypeName_ = getDefaultInstance().getMaybeTypeName(); + onChanged(); + return this; + } + /** + *
+     * For debug-only, the name of the type pointed to by this handle, if
+     * available.
+     * 
+ * + * optional string maybe_type_name = 5; + */ + public Builder setMaybeTypeNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + maybeTypeName_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.ResourceHandle) + } + + // @@protoc_insertion_point(class_scope:tensorflow.ResourceHandle) + private static final org.tensorflow.framework.ResourceHandle DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.ResourceHandle(); + } + + public static org.tensorflow.framework.ResourceHandle getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public ResourceHandle parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResourceHandle(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.ResourceHandle getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleOrBuilder.java new file mode 100644 index 0000000000000..aa25ef4212c6f --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleOrBuilder.java @@ -0,0 +1,93 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/resource_handle.proto + +package org.tensorflow.framework; + +public interface ResourceHandleOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.ResourceHandle) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Unique name for the device containing the resource.
+   * 
+ * + * optional string device = 1; + */ + java.lang.String getDevice(); + /** + *
+   * Unique name for the device containing the resource.
+   * 
+ * + * optional string device = 1; + */ + com.google.protobuf.ByteString + getDeviceBytes(); + + /** + *
+   * Container in which this resource is placed.
+   * 
+ * + * optional string container = 2; + */ + java.lang.String getContainer(); + /** + *
+   * Container in which this resource is placed.
+   * 
+ * + * optional string container = 2; + */ + com.google.protobuf.ByteString + getContainerBytes(); + + /** + *
+   * Unique name of this resource.
+   * 
+ * + * optional string name = 3; + */ + java.lang.String getName(); + /** + *
+   * Unique name of this resource.
+   * 
+ * + * optional string name = 3; + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+   * Hash code for the type of the resource. Is only valid in the same device
+   * and in the same execution.
+   * 
+ * + * optional uint64 hash_code = 4; + */ + long getHashCode(); + + /** + *
+   * For debug-only, the name of the type pointed to by this handle, if
+   * available.
+   * 
+ * + * optional string maybe_type_name = 5; + */ + java.lang.String getMaybeTypeName(); + /** + *
+   * For debug-only, the name of the type pointed to by this handle, if
+   * available.
+   * 
+ * + * optional string maybe_type_name = 5; + */ + com.google.protobuf.ByteString + getMaybeTypeNameBytes(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleProto.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleProto.java new file mode 100644 index 0000000000000..08238d956b2d0 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/ResourceHandleProto.java @@ -0,0 +1,59 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/resource_handle.proto + +package org.tensorflow.framework; + +public final class ResourceHandleProto { + private ResourceHandleProto() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_ResourceHandle_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_ResourceHandle_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n/tensorflow/core/framework/resource_han" + + "dle.proto\022\ntensorflow\"m\n\016ResourceHandle\022" + + "\016\n\006device\030\001 \001(\t\022\021\n\tcontainer\030\002 \001(\t\022\014\n\004na" + + "me\030\003 \001(\t\022\021\n\thash_code\030\004 \001(\004\022\027\n\017maybe_typ" + + "e_name\030\005 \001(\tB4\n\030org.tensorflow.framework" + + "B\023ResourceHandleProtoP\001\370\001\001b\006proto3" + }; + 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + internal_static_tensorflow_ResourceHandle_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_ResourceHandle_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_ResourceHandle_descriptor, + new java.lang.String[] { "Device", "Container", "Name", "HashCode", "MaybeTypeName", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadata.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadata.java new file mode 100644 index 0000000000000..26a1986596d60 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadata.java @@ -0,0 +1,1297 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + *
+ * EXPERIMENTAL. Metadata output (i.e., non-Tensor) for a single Run() call.
+ * 
+ * + * Protobuf type {@code tensorflow.RunMetadata} + */ +public final class RunMetadata extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.RunMetadata) + RunMetadataOrBuilder { + // Use RunMetadata.newBuilder() to construct. + private RunMetadata(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RunMetadata() { + partitionGraphs_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private RunMetadata( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + org.tensorflow.framework.StepStats.Builder subBuilder = null; + if (stepStats_ != null) { + subBuilder = stepStats_.toBuilder(); + } + stepStats_ = input.readMessage(org.tensorflow.framework.StepStats.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(stepStats_); + stepStats_ = subBuilder.buildPartial(); + } + + break; + } + case 18: { + org.tensorflow.framework.CostGraphDef.Builder subBuilder = null; + if (costGraph_ != null) { + subBuilder = costGraph_.toBuilder(); + } + costGraph_ = input.readMessage(org.tensorflow.framework.CostGraphDef.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(costGraph_); + costGraph_ = subBuilder.buildPartial(); + } + + break; + } + case 26: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + partitionGraphs_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + 
} + partitionGraphs_.add( + input.readMessage(org.tensorflow.framework.GraphDef.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + partitionGraphs_ = java.util.Collections.unmodifiableList(partitionGraphs_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunMetadata_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunMetadata_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.RunMetadata.class, org.tensorflow.framework.RunMetadata.Builder.class); + } + + private int bitField0_; + public static final int STEP_STATS_FIELD_NUMBER = 1; + private org.tensorflow.framework.StepStats stepStats_; + /** + *
+   * Statistics traced for this step. Populated if tracing is turned on via the
+   * "RunOptions" proto.
+   * EXPERIMENTAL: The format and set of events may change in future versions.
+   * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public boolean hasStepStats() { + return stepStats_ != null; + } + /** + *
+   * Statistics traced for this step. Populated if tracing is turned on via the
+   * "RunOptions" proto.
+   * EXPERIMENTAL: The format and set of events may change in future versions.
+   * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public org.tensorflow.framework.StepStats getStepStats() { + return stepStats_ == null ? org.tensorflow.framework.StepStats.getDefaultInstance() : stepStats_; + } + /** + *
+   * Statistics traced for this step. Populated if tracing is turned on via the
+   * "RunOptions" proto.
+   * EXPERIMENTAL: The format and set of events may change in future versions.
+   * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public org.tensorflow.framework.StepStatsOrBuilder getStepStatsOrBuilder() { + return getStepStats(); + } + + public static final int COST_GRAPH_FIELD_NUMBER = 2; + private org.tensorflow.framework.CostGraphDef costGraph_; + /** + *
+   * The cost graph for the computation defined by the run call.
+   * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public boolean hasCostGraph() { + return costGraph_ != null; + } + /** + *
+   * The cost graph for the computation defined by the run call.
+   * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public org.tensorflow.framework.CostGraphDef getCostGraph() { + return costGraph_ == null ? org.tensorflow.framework.CostGraphDef.getDefaultInstance() : costGraph_; + } + /** + *
+   * The cost graph for the computation defined by the run call.
+   * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public org.tensorflow.framework.CostGraphDefOrBuilder getCostGraphOrBuilder() { + return getCostGraph(); + } + + public static final int PARTITION_GRAPHS_FIELD_NUMBER = 3; + private java.util.List partitionGraphs_; + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public java.util.List getPartitionGraphsList() { + return partitionGraphs_; + } + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public java.util.List + getPartitionGraphsOrBuilderList() { + return partitionGraphs_; + } + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public int getPartitionGraphsCount() { + return partitionGraphs_.size(); + } + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDef getPartitionGraphs(int index) { + return partitionGraphs_.get(index); + } + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDefOrBuilder getPartitionGraphsOrBuilder( + int index) { + return partitionGraphs_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stepStats_ != null) { + output.writeMessage(1, getStepStats()); + } + if (costGraph_ != null) { + output.writeMessage(2, getCostGraph()); + } + for (int i = 0; i < partitionGraphs_.size(); i++) { + output.writeMessage(3, partitionGraphs_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stepStats_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getStepStats()); + } + if (costGraph_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getCostGraph()); + } + for (int i = 0; i < partitionGraphs_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, partitionGraphs_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.RunMetadata)) { + return super.equals(obj); + } + org.tensorflow.framework.RunMetadata other = (org.tensorflow.framework.RunMetadata) obj; + + boolean result = true; + result = result && (hasStepStats() == other.hasStepStats()); + if (hasStepStats()) { + result = result && getStepStats() + .equals(other.getStepStats()); + } + result = result && (hasCostGraph() == other.hasCostGraph()); + if 
(hasCostGraph()) { + result = result && getCostGraph() + .equals(other.getCostGraph()); + } + result = result && getPartitionGraphsList() + .equals(other.getPartitionGraphsList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasStepStats()) { + hash = (37 * hash) + STEP_STATS_FIELD_NUMBER; + hash = (53 * hash) + getStepStats().hashCode(); + } + if (hasCostGraph()) { + hash = (37 * hash) + COST_GRAPH_FIELD_NUMBER; + hash = (53 * hash) + getCostGraph().hashCode(); + } + if (getPartitionGraphsCount() > 0) { + hash = (37 * hash) + PARTITION_GRAPHS_FIELD_NUMBER; + hash = (53 * hash) + getPartitionGraphsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.RunMetadata parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.RunMetadata parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.RunMetadata parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.RunMetadata parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.RunMetadata parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.RunMetadata parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.RunMetadata parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.RunMetadata parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.RunMetadata parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.RunMetadata parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.RunMetadata prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * EXPERIMENTAL. Metadata output (i.e., non-Tensor) for a single Run() call.
+   * 
+ * + * Protobuf type {@code tensorflow.RunMetadata} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.RunMetadata) + org.tensorflow.framework.RunMetadataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunMetadata_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunMetadata_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.RunMetadata.class, org.tensorflow.framework.RunMetadata.Builder.class); + } + + // Construct using org.tensorflow.framework.RunMetadata.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getPartitionGraphsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (stepStatsBuilder_ == null) { + stepStats_ = null; + } else { + stepStats_ = null; + stepStatsBuilder_ = null; + } + if (costGraphBuilder_ == null) { + costGraph_ = null; + } else { + costGraph_ = null; + costGraphBuilder_ = null; + } + if (partitionGraphsBuilder_ == null) { + partitionGraphs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + partitionGraphsBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunMetadata_descriptor; + } + + public org.tensorflow.framework.RunMetadata 
getDefaultInstanceForType() { + return org.tensorflow.framework.RunMetadata.getDefaultInstance(); + } + + public org.tensorflow.framework.RunMetadata build() { + org.tensorflow.framework.RunMetadata result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.RunMetadata buildPartial() { + org.tensorflow.framework.RunMetadata result = new org.tensorflow.framework.RunMetadata(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (stepStatsBuilder_ == null) { + result.stepStats_ = stepStats_; + } else { + result.stepStats_ = stepStatsBuilder_.build(); + } + if (costGraphBuilder_ == null) { + result.costGraph_ = costGraph_; + } else { + result.costGraph_ = costGraphBuilder_.build(); + } + if (partitionGraphsBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + partitionGraphs_ = java.util.Collections.unmodifiableList(partitionGraphs_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.partitionGraphs_ = partitionGraphs_; + } else { + result.partitionGraphs_ = partitionGraphsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.RunMetadata) { + return mergeFrom((org.tensorflow.framework.RunMetadata)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.RunMetadata other) { + if (other == org.tensorflow.framework.RunMetadata.getDefaultInstance()) return this; + if (other.hasStepStats()) { + mergeStepStats(other.getStepStats()); + } + if (other.hasCostGraph()) { + mergeCostGraph(other.getCostGraph()); + } + if (partitionGraphsBuilder_ == null) { + if (!other.partitionGraphs_.isEmpty()) { + if (partitionGraphs_.isEmpty()) { + partitionGraphs_ = other.partitionGraphs_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensurePartitionGraphsIsMutable(); + partitionGraphs_.addAll(other.partitionGraphs_); + } + onChanged(); + } + } else { + if (!other.partitionGraphs_.isEmpty()) { + if (partitionGraphsBuilder_.isEmpty()) { + partitionGraphsBuilder_.dispose(); + partitionGraphsBuilder_ = null; + partitionGraphs_ = other.partitionGraphs_; + bitField0_ = (bitField0_ & ~0x00000004); + partitionGraphsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getPartitionGraphsFieldBuilder() : null; + } else { + partitionGraphsBuilder_.addAllMessages(other.partitionGraphs_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.RunMetadata parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.RunMetadata) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private org.tensorflow.framework.StepStats stepStats_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.StepStats, org.tensorflow.framework.StepStats.Builder, org.tensorflow.framework.StepStatsOrBuilder> stepStatsBuilder_; + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public boolean hasStepStats() { + return stepStatsBuilder_ != null || stepStats_ != null; + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public org.tensorflow.framework.StepStats getStepStats() { + if (stepStatsBuilder_ == null) { + return stepStats_ == null ? org.tensorflow.framework.StepStats.getDefaultInstance() : stepStats_; + } else { + return stepStatsBuilder_.getMessage(); + } + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public Builder setStepStats(org.tensorflow.framework.StepStats value) { + if (stepStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + stepStats_ = value; + onChanged(); + } else { + stepStatsBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public Builder setStepStats( + org.tensorflow.framework.StepStats.Builder builderForValue) { + if (stepStatsBuilder_ == null) { + stepStats_ = builderForValue.build(); + onChanged(); + } else { + stepStatsBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public Builder mergeStepStats(org.tensorflow.framework.StepStats value) { + if (stepStatsBuilder_ == null) { + if (stepStats_ != null) { + stepStats_ = + org.tensorflow.framework.StepStats.newBuilder(stepStats_).mergeFrom(value).buildPartial(); + } else { + stepStats_ = value; + } + onChanged(); + } else { + stepStatsBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public Builder clearStepStats() { + if (stepStatsBuilder_ == null) { + stepStats_ = null; + onChanged(); + } else { + stepStats_ = null; + stepStatsBuilder_ = null; + } + + return this; + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public org.tensorflow.framework.StepStats.Builder getStepStatsBuilder() { + + onChanged(); + return getStepStatsFieldBuilder().getBuilder(); + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + public org.tensorflow.framework.StepStatsOrBuilder getStepStatsOrBuilder() { + if (stepStatsBuilder_ != null) { + return stepStatsBuilder_.getMessageOrBuilder(); + } else { + return stepStats_ == null ? + org.tensorflow.framework.StepStats.getDefaultInstance() : stepStats_; + } + } + /** + *
+     * Statistics traced for this step. Populated if tracing is turned on via the
+     * "RunOptions" proto.
+     * EXPERIMENTAL: The format and set of events may change in future versions.
+     * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.StepStats, org.tensorflow.framework.StepStats.Builder, org.tensorflow.framework.StepStatsOrBuilder> + getStepStatsFieldBuilder() { + if (stepStatsBuilder_ == null) { + stepStatsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.StepStats, org.tensorflow.framework.StepStats.Builder, org.tensorflow.framework.StepStatsOrBuilder>( + getStepStats(), + getParentForChildren(), + isClean()); + stepStats_ = null; + } + return stepStatsBuilder_; + } + + private org.tensorflow.framework.CostGraphDef costGraph_ = null; + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.CostGraphDef, org.tensorflow.framework.CostGraphDef.Builder, org.tensorflow.framework.CostGraphDefOrBuilder> costGraphBuilder_; + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public boolean hasCostGraph() { + return costGraphBuilder_ != null || costGraph_ != null; + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public org.tensorflow.framework.CostGraphDef getCostGraph() { + if (costGraphBuilder_ == null) { + return costGraph_ == null ? org.tensorflow.framework.CostGraphDef.getDefaultInstance() : costGraph_; + } else { + return costGraphBuilder_.getMessage(); + } + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public Builder setCostGraph(org.tensorflow.framework.CostGraphDef value) { + if (costGraphBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + costGraph_ = value; + onChanged(); + } else { + costGraphBuilder_.setMessage(value); + } + + return this; + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public Builder setCostGraph( + org.tensorflow.framework.CostGraphDef.Builder builderForValue) { + if (costGraphBuilder_ == null) { + costGraph_ = builderForValue.build(); + onChanged(); + } else { + costGraphBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public Builder mergeCostGraph(org.tensorflow.framework.CostGraphDef value) { + if (costGraphBuilder_ == null) { + if (costGraph_ != null) { + costGraph_ = + org.tensorflow.framework.CostGraphDef.newBuilder(costGraph_).mergeFrom(value).buildPartial(); + } else { + costGraph_ = value; + } + onChanged(); + } else { + costGraphBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public Builder clearCostGraph() { + if (costGraphBuilder_ == null) { + costGraph_ = null; + onChanged(); + } else { + costGraph_ = null; + costGraphBuilder_ = null; + } + + return this; + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public org.tensorflow.framework.CostGraphDef.Builder getCostGraphBuilder() { + + onChanged(); + return getCostGraphFieldBuilder().getBuilder(); + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + public org.tensorflow.framework.CostGraphDefOrBuilder getCostGraphOrBuilder() { + if (costGraphBuilder_ != null) { + return costGraphBuilder_.getMessageOrBuilder(); + } else { + return costGraph_ == null ? + org.tensorflow.framework.CostGraphDef.getDefaultInstance() : costGraph_; + } + } + /** + *
+     * The cost graph for the computation defined by the run call.
+     * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.CostGraphDef, org.tensorflow.framework.CostGraphDef.Builder, org.tensorflow.framework.CostGraphDefOrBuilder> + getCostGraphFieldBuilder() { + if (costGraphBuilder_ == null) { + costGraphBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.tensorflow.framework.CostGraphDef, org.tensorflow.framework.CostGraphDef.Builder, org.tensorflow.framework.CostGraphDefOrBuilder>( + getCostGraph(), + getParentForChildren(), + isClean()); + costGraph_ = null; + } + return costGraphBuilder_; + } + + private java.util.List partitionGraphs_ = + java.util.Collections.emptyList(); + private void ensurePartitionGraphsIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + partitionGraphs_ = new java.util.ArrayList(partitionGraphs_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.GraphDef, org.tensorflow.framework.GraphDef.Builder, org.tensorflow.framework.GraphDefOrBuilder> partitionGraphsBuilder_; + + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public java.util.List getPartitionGraphsList() { + if (partitionGraphsBuilder_ == null) { + return java.util.Collections.unmodifiableList(partitionGraphs_); + } else { + return partitionGraphsBuilder_.getMessageList(); + } + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public int getPartitionGraphsCount() { + if (partitionGraphsBuilder_ == null) { + return partitionGraphs_.size(); + } else { + return partitionGraphsBuilder_.getCount(); + } + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDef getPartitionGraphs(int index) { + if (partitionGraphsBuilder_ == null) { + return partitionGraphs_.get(index); + } else { + return partitionGraphsBuilder_.getMessage(index); + } + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder setPartitionGraphs( + int index, org.tensorflow.framework.GraphDef value) { + if (partitionGraphsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePartitionGraphsIsMutable(); + partitionGraphs_.set(index, value); + onChanged(); + } else { + partitionGraphsBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder setPartitionGraphs( + int index, org.tensorflow.framework.GraphDef.Builder builderForValue) { + if (partitionGraphsBuilder_ == null) { + ensurePartitionGraphsIsMutable(); + partitionGraphs_.set(index, builderForValue.build()); + onChanged(); + } else { + partitionGraphsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder addPartitionGraphs(org.tensorflow.framework.GraphDef value) { + if (partitionGraphsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePartitionGraphsIsMutable(); + partitionGraphs_.add(value); + onChanged(); + } else { + partitionGraphsBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder addPartitionGraphs( + int index, org.tensorflow.framework.GraphDef value) { + if (partitionGraphsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePartitionGraphsIsMutable(); + partitionGraphs_.add(index, value); + onChanged(); + } else { + partitionGraphsBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder addPartitionGraphs( + org.tensorflow.framework.GraphDef.Builder builderForValue) { + if (partitionGraphsBuilder_ == null) { + ensurePartitionGraphsIsMutable(); + partitionGraphs_.add(builderForValue.build()); + onChanged(); + } else { + partitionGraphsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder addPartitionGraphs( + int index, org.tensorflow.framework.GraphDef.Builder builderForValue) { + if (partitionGraphsBuilder_ == null) { + ensurePartitionGraphsIsMutable(); + partitionGraphs_.add(index, builderForValue.build()); + onChanged(); + } else { + partitionGraphsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder addAllPartitionGraphs( + java.lang.Iterable values) { + if (partitionGraphsBuilder_ == null) { + ensurePartitionGraphsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, partitionGraphs_); + onChanged(); + } else { + partitionGraphsBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder clearPartitionGraphs() { + if (partitionGraphsBuilder_ == null) { + partitionGraphs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + partitionGraphsBuilder_.clear(); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public Builder removePartitionGraphs(int index) { + if (partitionGraphsBuilder_ == null) { + ensurePartitionGraphsIsMutable(); + partitionGraphs_.remove(index); + onChanged(); + } else { + partitionGraphsBuilder_.remove(index); + } + return this; + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDef.Builder getPartitionGraphsBuilder( + int index) { + return getPartitionGraphsFieldBuilder().getBuilder(index); + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDefOrBuilder getPartitionGraphsOrBuilder( + int index) { + if (partitionGraphsBuilder_ == null) { + return partitionGraphs_.get(index); } else { + return partitionGraphsBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public java.util.List + getPartitionGraphsOrBuilderList() { + if (partitionGraphsBuilder_ != null) { + return partitionGraphsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(partitionGraphs_); + } + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDef.Builder addPartitionGraphsBuilder() { + return getPartitionGraphsFieldBuilder().addBuilder( + org.tensorflow.framework.GraphDef.getDefaultInstance()); + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public org.tensorflow.framework.GraphDef.Builder addPartitionGraphsBuilder( + int index) { + return getPartitionGraphsFieldBuilder().addBuilder( + index, org.tensorflow.framework.GraphDef.getDefaultInstance()); + } + /** + *
+     * Graphs of the partitions executed by executors.
+     * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + public java.util.List + getPartitionGraphsBuilderList() { + return getPartitionGraphsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.GraphDef, org.tensorflow.framework.GraphDef.Builder, org.tensorflow.framework.GraphDefOrBuilder> + getPartitionGraphsFieldBuilder() { + if (partitionGraphsBuilder_ == null) { + partitionGraphsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.GraphDef, org.tensorflow.framework.GraphDef.Builder, org.tensorflow.framework.GraphDefOrBuilder>( + partitionGraphs_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + partitionGraphs_ = null; + } + return partitionGraphsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.RunMetadata) + } + + // @@protoc_insertion_point(class_scope:tensorflow.RunMetadata) + private static final org.tensorflow.framework.RunMetadata DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.RunMetadata(); + } + + public static org.tensorflow.framework.RunMetadata getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RunMetadata parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RunMetadata(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.RunMetadata getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadataOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadataOrBuilder.java new file mode 100644 index 0000000000000..99ff321609404 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunMetadataOrBuilder.java @@ -0,0 +1,109 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface RunMetadataOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.RunMetadata) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Statistics traced for this step. Populated if tracing is turned on via the
+   * "RunOptions" proto.
+   * EXPERIMENTAL: The format and set of events may change in future versions.
+   * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + boolean hasStepStats(); + /** + *
+   * Statistics traced for this step. Populated if tracing is turned on via the
+   * "RunOptions" proto.
+   * EXPERIMENTAL: The format and set of events may change in future versions.
+   * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + org.tensorflow.framework.StepStats getStepStats(); + /** + *
+   * Statistics traced for this step. Populated if tracing is turned on via the
+   * "RunOptions" proto.
+   * EXPERIMENTAL: The format and set of events may change in future versions.
+   * 
+ * + * optional .tensorflow.StepStats step_stats = 1; + */ + org.tensorflow.framework.StepStatsOrBuilder getStepStatsOrBuilder(); + + /** + *
+   * The cost graph for the computation defined by the run call.
+   * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + boolean hasCostGraph(); + /** + *
+   * The cost graph for the computation defined by the run call.
+   * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + org.tensorflow.framework.CostGraphDef getCostGraph(); + /** + *
+   * The cost graph for the computation defined by the run call.
+   * 
+ * + * optional .tensorflow.CostGraphDef cost_graph = 2; + */ + org.tensorflow.framework.CostGraphDefOrBuilder getCostGraphOrBuilder(); + + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + java.util.List + getPartitionGraphsList(); + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + org.tensorflow.framework.GraphDef getPartitionGraphs(int index); + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + int getPartitionGraphsCount(); + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + java.util.List + getPartitionGraphsOrBuilderList(); + /** + *
+   * Graphs of the partitions executed by executors.
+   * 
+ * + * repeated .tensorflow.GraphDef partition_graphs = 3; + */ + org.tensorflow.framework.GraphDefOrBuilder getPartitionGraphsOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptions.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptions.java new file mode 100644 index 0000000000000..a6561d28ac454 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptions.java @@ -0,0 +1,1247 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +/** + *
+ * EXPERIMENTAL. Options for a single Run() call.
+ * 
+ * + * Protobuf type {@code tensorflow.RunOptions} + */ +public final class RunOptions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.RunOptions) + RunOptionsOrBuilder { + // Use RunOptions.newBuilder() to construct. + private RunOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RunOptions() { + traceLevel_ = 0; + timeoutInMs_ = 0L; + interOpThreadPool_ = 0; + debugTensorWatchOpts_ = java.util.Collections.emptyList(); + outputPartitionGraphs_ = false; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private RunOptions( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + + traceLevel_ = rawValue; + break; + } + case 16: { + + timeoutInMs_ = input.readInt64(); + break; + } + case 24: { + + interOpThreadPool_ = input.readInt32(); + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + debugTensorWatchOpts_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + debugTensorWatchOpts_.add( + input.readMessage(org.tensorflow.framework.DebugTensorWatch.parser(), extensionRegistry)); + break; + } + case 40: { + + outputPartitionGraphs_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( 
+ e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + debugTensorWatchOpts_ = java.util.Collections.unmodifiableList(debugTensorWatchOpts_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.RunOptions.class, org.tensorflow.framework.RunOptions.Builder.class); + } + + /** + *
+   * TODO(pbar) Turn this into a TraceOptions proto which allows
+   * tracing to be controlled in a more orthogonal manner?
+   * 
+ * + * Protobuf enum {@code tensorflow.RunOptions.TraceLevel} + */ + public enum TraceLevel + implements com.google.protobuf.ProtocolMessageEnum { + /** + * NO_TRACE = 0; + */ + NO_TRACE(0), + /** + * SOFTWARE_TRACE = 1; + */ + SOFTWARE_TRACE(1), + /** + * HARDWARE_TRACE = 2; + */ + HARDWARE_TRACE(2), + /** + * FULL_TRACE = 3; + */ + FULL_TRACE(3), + UNRECOGNIZED(-1), + ; + + /** + * NO_TRACE = 0; + */ + public static final int NO_TRACE_VALUE = 0; + /** + * SOFTWARE_TRACE = 1; + */ + public static final int SOFTWARE_TRACE_VALUE = 1; + /** + * HARDWARE_TRACE = 2; + */ + public static final int HARDWARE_TRACE_VALUE = 2; + /** + * FULL_TRACE = 3; + */ + public static final int FULL_TRACE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static TraceLevel valueOf(int value) { + return forNumber(value); + } + + public static TraceLevel forNumber(int value) { + switch (value) { + case 0: return NO_TRACE; + case 1: return SOFTWARE_TRACE; + case 2: return HARDWARE_TRACE; + case 3: return FULL_TRACE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + TraceLevel> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public TraceLevel findValueByNumber(int number) { + return TraceLevel.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + 
getDescriptor() { + return org.tensorflow.framework.RunOptions.getDescriptor().getEnumTypes().get(0); + } + + private static final TraceLevel[] VALUES = values(); + + public static TraceLevel valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private TraceLevel(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:tensorflow.RunOptions.TraceLevel) + } + + private int bitField0_; + public static final int TRACE_LEVEL_FIELD_NUMBER = 1; + private int traceLevel_; + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public int getTraceLevelValue() { + return traceLevel_; + } + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public org.tensorflow.framework.RunOptions.TraceLevel getTraceLevel() { + org.tensorflow.framework.RunOptions.TraceLevel result = org.tensorflow.framework.RunOptions.TraceLevel.valueOf(traceLevel_); + return result == null ? org.tensorflow.framework.RunOptions.TraceLevel.UNRECOGNIZED : result; + } + + public static final int TIMEOUT_IN_MS_FIELD_NUMBER = 2; + private long timeoutInMs_; + /** + *
+   * Time to wait for operation to complete in milliseconds.
+   * 
+ * + * optional int64 timeout_in_ms = 2; + */ + public long getTimeoutInMs() { + return timeoutInMs_; + } + + public static final int INTER_OP_THREAD_POOL_FIELD_NUMBER = 3; + private int interOpThreadPool_; + /** + *
+   * The thread pool to use, if session_inter_op_thread_pool is configured.
+   * 
+ * + * optional int32 inter_op_thread_pool = 3; + */ + public int getInterOpThreadPool() { + return interOpThreadPool_; + } + + public static final int DEBUG_TENSOR_WATCH_OPTS_FIELD_NUMBER = 4; + private java.util.List debugTensorWatchOpts_; + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public java.util.List getDebugTensorWatchOptsList() { + return debugTensorWatchOpts_; + } + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public java.util.List + getDebugTensorWatchOptsOrBuilderList() { + return debugTensorWatchOpts_; + } + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public int getDebugTensorWatchOptsCount() { + return debugTensorWatchOpts_.size(); + } + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatch getDebugTensorWatchOpts(int index) { + return debugTensorWatchOpts_.get(index); + } + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatchOrBuilder getDebugTensorWatchOptsOrBuilder( + int index) { + return debugTensorWatchOpts_.get(index); + } + + public static final int OUTPUT_PARTITION_GRAPHS_FIELD_NUMBER = 5; + private boolean outputPartitionGraphs_; + /** + *
+   * Whether the partition graph(s) executed by the executor(s) should be
+   * outputted via RunMetadata.
+   * 
+ * + * optional bool output_partition_graphs = 5; + */ + public boolean getOutputPartitionGraphs() { + return outputPartitionGraphs_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (traceLevel_ != org.tensorflow.framework.RunOptions.TraceLevel.NO_TRACE.getNumber()) { + output.writeEnum(1, traceLevel_); + } + if (timeoutInMs_ != 0L) { + output.writeInt64(2, timeoutInMs_); + } + if (interOpThreadPool_ != 0) { + output.writeInt32(3, interOpThreadPool_); + } + for (int i = 0; i < debugTensorWatchOpts_.size(); i++) { + output.writeMessage(4, debugTensorWatchOpts_.get(i)); + } + if (outputPartitionGraphs_ != false) { + output.writeBool(5, outputPartitionGraphs_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (traceLevel_ != org.tensorflow.framework.RunOptions.TraceLevel.NO_TRACE.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, traceLevel_); + } + if (timeoutInMs_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, timeoutInMs_); + } + if (interOpThreadPool_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, interOpThreadPool_); + } + for (int i = 0; i < debugTensorWatchOpts_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, debugTensorWatchOpts_.get(i)); + } + if (outputPartitionGraphs_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, outputPartitionGraphs_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final 
java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.RunOptions)) { + return super.equals(obj); + } + org.tensorflow.framework.RunOptions other = (org.tensorflow.framework.RunOptions) obj; + + boolean result = true; + result = result && traceLevel_ == other.traceLevel_; + result = result && (getTimeoutInMs() + == other.getTimeoutInMs()); + result = result && (getInterOpThreadPool() + == other.getInterOpThreadPool()); + result = result && getDebugTensorWatchOptsList() + .equals(other.getDebugTensorWatchOptsList()); + result = result && (getOutputPartitionGraphs() + == other.getOutputPartitionGraphs()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + TRACE_LEVEL_FIELD_NUMBER; + hash = (53 * hash) + traceLevel_; + hash = (37 * hash) + TIMEOUT_IN_MS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimeoutInMs()); + hash = (37 * hash) + INTER_OP_THREAD_POOL_FIELD_NUMBER; + hash = (53 * hash) + getInterOpThreadPool(); + if (getDebugTensorWatchOptsCount() > 0) { + hash = (37 * hash) + DEBUG_TENSOR_WATCH_OPTS_FIELD_NUMBER; + hash = (53 * hash) + getDebugTensorWatchOptsList().hashCode(); + } + hash = (37 * hash) + OUTPUT_PARTITION_GRAPHS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOutputPartitionGraphs()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.RunOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.RunOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.RunOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.RunOptions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.RunOptions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.RunOptions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.RunOptions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.RunOptions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.RunOptions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.RunOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.RunOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+   * EXPERIMENTAL. Options for a single Run() call.
+   * 
+ * + * Protobuf type {@code tensorflow.RunOptions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.RunOptions) + org.tensorflow.framework.RunOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunOptions_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.RunOptions.class, org.tensorflow.framework.RunOptions.Builder.class); + } + + // Construct using org.tensorflow.framework.RunOptions.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getDebugTensorWatchOptsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + traceLevel_ = 0; + + timeoutInMs_ = 0L; + + interOpThreadPool_ = 0; + + if (debugTensorWatchOptsBuilder_ == null) { + debugTensorWatchOpts_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + debugTensorWatchOptsBuilder_.clear(); + } + outputPartitionGraphs_ = false; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.ConfigProtos.internal_static_tensorflow_RunOptions_descriptor; + } + + public org.tensorflow.framework.RunOptions getDefaultInstanceForType() { + return org.tensorflow.framework.RunOptions.getDefaultInstance(); + } + + public 
org.tensorflow.framework.RunOptions build() { + org.tensorflow.framework.RunOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.RunOptions buildPartial() { + org.tensorflow.framework.RunOptions result = new org.tensorflow.framework.RunOptions(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.traceLevel_ = traceLevel_; + result.timeoutInMs_ = timeoutInMs_; + result.interOpThreadPool_ = interOpThreadPool_; + if (debugTensorWatchOptsBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + debugTensorWatchOpts_ = java.util.Collections.unmodifiableList(debugTensorWatchOpts_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.debugTensorWatchOpts_ = debugTensorWatchOpts_; + } else { + result.debugTensorWatchOpts_ = debugTensorWatchOptsBuilder_.build(); + } + result.outputPartitionGraphs_ = outputPartitionGraphs_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other 
instanceof org.tensorflow.framework.RunOptions) { + return mergeFrom((org.tensorflow.framework.RunOptions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.RunOptions other) { + if (other == org.tensorflow.framework.RunOptions.getDefaultInstance()) return this; + if (other.traceLevel_ != 0) { + setTraceLevelValue(other.getTraceLevelValue()); + } + if (other.getTimeoutInMs() != 0L) { + setTimeoutInMs(other.getTimeoutInMs()); + } + if (other.getInterOpThreadPool() != 0) { + setInterOpThreadPool(other.getInterOpThreadPool()); + } + if (debugTensorWatchOptsBuilder_ == null) { + if (!other.debugTensorWatchOpts_.isEmpty()) { + if (debugTensorWatchOpts_.isEmpty()) { + debugTensorWatchOpts_ = other.debugTensorWatchOpts_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.addAll(other.debugTensorWatchOpts_); + } + onChanged(); + } + } else { + if (!other.debugTensorWatchOpts_.isEmpty()) { + if (debugTensorWatchOptsBuilder_.isEmpty()) { + debugTensorWatchOptsBuilder_.dispose(); + debugTensorWatchOptsBuilder_ = null; + debugTensorWatchOpts_ = other.debugTensorWatchOpts_; + bitField0_ = (bitField0_ & ~0x00000008); + debugTensorWatchOptsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getDebugTensorWatchOptsFieldBuilder() : null; + } else { + debugTensorWatchOptsBuilder_.addAllMessages(other.debugTensorWatchOpts_); + } + } + } + if (other.getOutputPartitionGraphs() != false) { + setOutputPartitionGraphs(other.getOutputPartitionGraphs()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.RunOptions parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.RunOptions) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private int traceLevel_ = 0; + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public int getTraceLevelValue() { + return traceLevel_; + } + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public Builder setTraceLevelValue(int value) { + traceLevel_ = value; + onChanged(); + return this; + } + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public org.tensorflow.framework.RunOptions.TraceLevel getTraceLevel() { + org.tensorflow.framework.RunOptions.TraceLevel result = org.tensorflow.framework.RunOptions.TraceLevel.valueOf(traceLevel_); + return result == null ? 
org.tensorflow.framework.RunOptions.TraceLevel.UNRECOGNIZED : result; + } + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public Builder setTraceLevel(org.tensorflow.framework.RunOptions.TraceLevel value) { + if (value == null) { + throw new NullPointerException(); + } + + traceLevel_ = value.getNumber(); + onChanged(); + return this; + } + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + public Builder clearTraceLevel() { + + traceLevel_ = 0; + onChanged(); + return this; + } + + private long timeoutInMs_ ; + /** + *
+     * Time to wait for operation to complete in milliseconds.
+     * 
+ * + * optional int64 timeout_in_ms = 2; + */ + public long getTimeoutInMs() { + return timeoutInMs_; + } + /** + *
+     * Time to wait for operation to complete in milliseconds.
+     * 
+ * + * optional int64 timeout_in_ms = 2; + */ + public Builder setTimeoutInMs(long value) { + + timeoutInMs_ = value; + onChanged(); + return this; + } + /** + *
+     * Time to wait for operation to complete in milliseconds.
+     * 
+ * + * optional int64 timeout_in_ms = 2; + */ + public Builder clearTimeoutInMs() { + + timeoutInMs_ = 0L; + onChanged(); + return this; + } + + private int interOpThreadPool_ ; + /** + *
+     * The thread pool to use, if session_inter_op_thread_pool is configured.
+     * 
+ * + * optional int32 inter_op_thread_pool = 3; + */ + public int getInterOpThreadPool() { + return interOpThreadPool_; + } + /** + *
+     * The thread pool to use, if session_inter_op_thread_pool is configured.
+     * 
+ * + * optional int32 inter_op_thread_pool = 3; + */ + public Builder setInterOpThreadPool(int value) { + + interOpThreadPool_ = value; + onChanged(); + return this; + } + /** + *
+     * The thread pool to use, if session_inter_op_thread_pool is configured.
+     * 
+ * + * optional int32 inter_op_thread_pool = 3; + */ + public Builder clearInterOpThreadPool() { + + interOpThreadPool_ = 0; + onChanged(); + return this; + } + + private java.util.List debugTensorWatchOpts_ = + java.util.Collections.emptyList(); + private void ensureDebugTensorWatchOptsIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + debugTensorWatchOpts_ = new java.util.ArrayList(debugTensorWatchOpts_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.DebugTensorWatch, org.tensorflow.framework.DebugTensorWatch.Builder, org.tensorflow.framework.DebugTensorWatchOrBuilder> debugTensorWatchOptsBuilder_; + + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public java.util.List getDebugTensorWatchOptsList() { + if (debugTensorWatchOptsBuilder_ == null) { + return java.util.Collections.unmodifiableList(debugTensorWatchOpts_); + } else { + return debugTensorWatchOptsBuilder_.getMessageList(); + } + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public int getDebugTensorWatchOptsCount() { + if (debugTensorWatchOptsBuilder_ == null) { + return debugTensorWatchOpts_.size(); + } else { + return debugTensorWatchOptsBuilder_.getCount(); + } + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatch getDebugTensorWatchOpts(int index) { + if (debugTensorWatchOptsBuilder_ == null) { + return debugTensorWatchOpts_.get(index); + } else { + return debugTensorWatchOptsBuilder_.getMessage(index); + } + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder setDebugTensorWatchOpts( + int index, org.tensorflow.framework.DebugTensorWatch value) { + if (debugTensorWatchOptsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.set(index, value); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder setDebugTensorWatchOpts( + int index, org.tensorflow.framework.DebugTensorWatch.Builder builderForValue) { + if (debugTensorWatchOptsBuilder_ == null) { + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.set(index, builderForValue.build()); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder addDebugTensorWatchOpts(org.tensorflow.framework.DebugTensorWatch value) { + if (debugTensorWatchOptsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.add(value); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.addMessage(value); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder addDebugTensorWatchOpts( + int index, org.tensorflow.framework.DebugTensorWatch value) { + if (debugTensorWatchOptsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.add(index, value); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder addDebugTensorWatchOpts( + org.tensorflow.framework.DebugTensorWatch.Builder builderForValue) { + if (debugTensorWatchOptsBuilder_ == null) { + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.add(builderForValue.build()); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder addDebugTensorWatchOpts( + int index, org.tensorflow.framework.DebugTensorWatch.Builder builderForValue) { + if (debugTensorWatchOptsBuilder_ == null) { + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.add(index, builderForValue.build()); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder addAllDebugTensorWatchOpts( + java.lang.Iterable values) { + if (debugTensorWatchOptsBuilder_ == null) { + ensureDebugTensorWatchOptsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, debugTensorWatchOpts_); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder clearDebugTensorWatchOpts() { + if (debugTensorWatchOptsBuilder_ == null) { + debugTensorWatchOpts_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.clear(); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public Builder removeDebugTensorWatchOpts(int index) { + if (debugTensorWatchOptsBuilder_ == null) { + ensureDebugTensorWatchOptsIsMutable(); + debugTensorWatchOpts_.remove(index); + onChanged(); + } else { + debugTensorWatchOptsBuilder_.remove(index); + } + return this; + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatch.Builder getDebugTensorWatchOptsBuilder( + int index) { + return getDebugTensorWatchOptsFieldBuilder().getBuilder(index); + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatchOrBuilder getDebugTensorWatchOptsOrBuilder( + int index) { + if (debugTensorWatchOptsBuilder_ == null) { + return debugTensorWatchOpts_.get(index); } else { + return debugTensorWatchOptsBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public java.util.List + getDebugTensorWatchOptsOrBuilderList() { + if (debugTensorWatchOptsBuilder_ != null) { + return debugTensorWatchOptsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(debugTensorWatchOpts_); + } + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatch.Builder addDebugTensorWatchOptsBuilder() { + return getDebugTensorWatchOptsFieldBuilder().addBuilder( + org.tensorflow.framework.DebugTensorWatch.getDefaultInstance()); + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public org.tensorflow.framework.DebugTensorWatch.Builder addDebugTensorWatchOptsBuilder( + int index) { + return getDebugTensorWatchOptsFieldBuilder().addBuilder( + index, org.tensorflow.framework.DebugTensorWatch.getDefaultInstance()); + } + /** + *
+     * Debugging options
+     * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + public java.util.List + getDebugTensorWatchOptsBuilderList() { + return getDebugTensorWatchOptsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.DebugTensorWatch, org.tensorflow.framework.DebugTensorWatch.Builder, org.tensorflow.framework.DebugTensorWatchOrBuilder> + getDebugTensorWatchOptsFieldBuilder() { + if (debugTensorWatchOptsBuilder_ == null) { + debugTensorWatchOptsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.DebugTensorWatch, org.tensorflow.framework.DebugTensorWatch.Builder, org.tensorflow.framework.DebugTensorWatchOrBuilder>( + debugTensorWatchOpts_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + debugTensorWatchOpts_ = null; + } + return debugTensorWatchOptsBuilder_; + } + + private boolean outputPartitionGraphs_ ; + /** + *
+     * Whether the partition graph(s) executed by the executor(s) should be
+     * outputted via RunMetadata.
+     * 
+ * + * optional bool output_partition_graphs = 5; + */ + public boolean getOutputPartitionGraphs() { + return outputPartitionGraphs_; + } + /** + *
+     * Whether the partition graph(s) executed by the executor(s) should be
+     * outputted via RunMetadata.
+     * 
+ * + * optional bool output_partition_graphs = 5; + */ + public Builder setOutputPartitionGraphs(boolean value) { + + outputPartitionGraphs_ = value; + onChanged(); + return this; + } + /** + *
+     * Whether the partition graph(s) executed by the executor(s) should be
+     * outputted via RunMetadata.
+     * 
+ * + * optional bool output_partition_graphs = 5; + */ + public Builder clearOutputPartitionGraphs() { + + outputPartitionGraphs_ = false; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.RunOptions) + } + + // @@protoc_insertion_point(class_scope:tensorflow.RunOptions) + private static final org.tensorflow.framework.RunOptions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.RunOptions(); + } + + public static org.tensorflow.framework.RunOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public RunOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RunOptions(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.RunOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptionsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptionsOrBuilder.java new file mode 100644 index 0000000000000..05d45c85e4bfd --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/RunOptionsOrBuilder.java @@ -0,0 +1,90 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/protobuf/config.proto + +package org.tensorflow.framework; + +public interface RunOptionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.RunOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + int getTraceLevelValue(); + /** + * optional .tensorflow.RunOptions.TraceLevel trace_level = 1; + */ + org.tensorflow.framework.RunOptions.TraceLevel getTraceLevel(); + + /** + *
+   * Time to wait for operation to complete in milliseconds.
+   * 
+ * + * optional int64 timeout_in_ms = 2; + */ + long getTimeoutInMs(); + + /** + *
+   * The thread pool to use, if session_inter_op_thread_pool is configured.
+   * 
+ * + * optional int32 inter_op_thread_pool = 3; + */ + int getInterOpThreadPool(); + + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + java.util.List + getDebugTensorWatchOptsList(); + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + org.tensorflow.framework.DebugTensorWatch getDebugTensorWatchOpts(int index); + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + int getDebugTensorWatchOptsCount(); + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + java.util.List + getDebugTensorWatchOptsOrBuilderList(); + /** + *
+   * Debugging options
+   * 
+ * + * repeated .tensorflow.DebugTensorWatch debug_tensor_watch_opts = 4; + */ + org.tensorflow.framework.DebugTensorWatchOrBuilder getDebugTensorWatchOptsOrBuilder( + int index); + + /** + *
+   * Whether the partition graph(s) executed by the executor(s) should be
+   * outputted via RunMetadata.
+   * 
+ * + * optional bool output_partition_graphs = 5; + */ + boolean getOutputPartitionGraphs(); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDef.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDef.java new file mode 100644 index 0000000000000..7392c7ac8c916 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDef.java @@ -0,0 +1,1122 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/variable.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.SaveSliceInfoDef} + */ +public final class SaveSliceInfoDef extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.SaveSliceInfoDef) + SaveSliceInfoDefOrBuilder { + // Use SaveSliceInfoDef.newBuilder() to construct. 
+ private SaveSliceInfoDef(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SaveSliceInfoDef() { + fullName_ = ""; + fullShape_ = java.util.Collections.emptyList(); + varOffset_ = java.util.Collections.emptyList(); + varShape_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private SaveSliceInfoDef( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + fullName_ = s; + break; + } + case 16: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + fullShape_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + fullShape_.add(input.readInt64()); + break; + } + case 18: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { + fullShape_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + while (input.getBytesUntilLimit() > 0) { + fullShape_.add(input.readInt64()); + } + input.popLimit(limit); + break; + } + case 24: { + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + varOffset_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + varOffset_.add(input.readInt64()); + break; + } + case 26: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && 
input.getBytesUntilLimit() > 0) { + varOffset_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000004; + } + while (input.getBytesUntilLimit() > 0) { + varOffset_.add(input.readInt64()); + } + input.popLimit(limit); + break; + } + case 32: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + varShape_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + varShape_.add(input.readInt64()); + break; + } + case 34: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) { + varShape_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000008; + } + while (input.getBytesUntilLimit() > 0) { + varShape_.add(input.readInt64()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + fullShape_ = java.util.Collections.unmodifiableList(fullShape_); + } + if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { + varOffset_ = java.util.Collections.unmodifiableList(varOffset_); + } + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + varShape_ = java.util.Collections.unmodifiableList(varShape_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.VariableProtos.internal_static_tensorflow_SaveSliceInfoDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.VariableProtos.internal_static_tensorflow_SaveSliceInfoDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + 
org.tensorflow.framework.SaveSliceInfoDef.class, org.tensorflow.framework.SaveSliceInfoDef.Builder.class); + } + + private int bitField0_; + public static final int FULL_NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object fullName_; + /** + *
+   * Name of the full variable of which this is a slice.
+   * 
+ * + * optional string full_name = 1; + */ + public java.lang.String getFullName() { + java.lang.Object ref = fullName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fullName_ = s; + return s; + } + } + /** + *
+   * Name of the full variable of which this is a slice.
+   * 
+ * + * optional string full_name = 1; + */ + public com.google.protobuf.ByteString + getFullNameBytes() { + java.lang.Object ref = fullName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fullName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FULL_SHAPE_FIELD_NUMBER = 2; + private java.util.List fullShape_; + /** + *
+   * Shape of the full variable.
+   * 
+ * + * repeated int64 full_shape = 2; + */ + public java.util.List + getFullShapeList() { + return fullShape_; + } + /** + *
+   * Shape of the full variable.
+   * 
+ * + * repeated int64 full_shape = 2; + */ + public int getFullShapeCount() { + return fullShape_.size(); + } + /** + *
+   * Shape of the full variable.
+   * 
+ * + * repeated int64 full_shape = 2; + */ + public long getFullShape(int index) { + return fullShape_.get(index); + } + private int fullShapeMemoizedSerializedSize = -1; + + public static final int VAR_OFFSET_FIELD_NUMBER = 3; + private java.util.List varOffset_; + /** + *
+   * Offset of this variable into the full variable.
+   * 
+ * + * repeated int64 var_offset = 3; + */ + public java.util.List + getVarOffsetList() { + return varOffset_; + } + /** + *
+   * Offset of this variable into the full variable.
+   * 
+ * + * repeated int64 var_offset = 3; + */ + public int getVarOffsetCount() { + return varOffset_.size(); + } + /** + *
+   * Offset of this variable into the full variable.
+   * 
+ * + * repeated int64 var_offset = 3; + */ + public long getVarOffset(int index) { + return varOffset_.get(index); + } + private int varOffsetMemoizedSerializedSize = -1; + + public static final int VAR_SHAPE_FIELD_NUMBER = 4; + private java.util.List varShape_; + /** + *
+   * Shape of this variable.
+   * 
+ * + * repeated int64 var_shape = 4; + */ + public java.util.List + getVarShapeList() { + return varShape_; + } + /** + *
+   * Shape of this variable.
+   * 
+ * + * repeated int64 var_shape = 4; + */ + public int getVarShapeCount() { + return varShape_.size(); + } + /** + *
+   * Shape of this variable.
+   * 
+ * + * repeated int64 var_shape = 4; + */ + public long getVarShape(int index) { + return varShape_.get(index); + } + private int varShapeMemoizedSerializedSize = -1; + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (!getFullNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, fullName_); + } + if (getFullShapeList().size() > 0) { + output.writeUInt32NoTag(18); + output.writeUInt32NoTag(fullShapeMemoizedSerializedSize); + } + for (int i = 0; i < fullShape_.size(); i++) { + output.writeInt64NoTag(fullShape_.get(i)); + } + if (getVarOffsetList().size() > 0) { + output.writeUInt32NoTag(26); + output.writeUInt32NoTag(varOffsetMemoizedSerializedSize); + } + for (int i = 0; i < varOffset_.size(); i++) { + output.writeInt64NoTag(varOffset_.get(i)); + } + if (getVarShapeList().size() > 0) { + output.writeUInt32NoTag(34); + output.writeUInt32NoTag(varShapeMemoizedSerializedSize); + } + for (int i = 0; i < varShape_.size(); i++) { + output.writeInt64NoTag(varShape_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getFullNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, fullName_); + } + { + int dataSize = 0; + for (int i = 0; i < fullShape_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt64SizeNoTag(fullShape_.get(i)); + } + size += dataSize; + if (!getFullShapeList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + fullShapeMemoizedSerializedSize = dataSize; + } + { + int 
dataSize = 0; + for (int i = 0; i < varOffset_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt64SizeNoTag(varOffset_.get(i)); + } + size += dataSize; + if (!getVarOffsetList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + varOffsetMemoizedSerializedSize = dataSize; + } + { + int dataSize = 0; + for (int i = 0; i < varShape_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeInt64SizeNoTag(varShape_.get(i)); + } + size += dataSize; + if (!getVarShapeList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + varShapeMemoizedSerializedSize = dataSize; + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.SaveSliceInfoDef)) { + return super.equals(obj); + } + org.tensorflow.framework.SaveSliceInfoDef other = (org.tensorflow.framework.SaveSliceInfoDef) obj; + + boolean result = true; + result = result && getFullName() + .equals(other.getFullName()); + result = result && getFullShapeList() + .equals(other.getFullShapeList()); + result = result && getVarOffsetList() + .equals(other.getVarOffsetList()); + result = result && getVarShapeList() + .equals(other.getVarShapeList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + FULL_NAME_FIELD_NUMBER; + hash = (53 * hash) + getFullName().hashCode(); + if (getFullShapeCount() > 0) { + hash = (37 * hash) + FULL_SHAPE_FIELD_NUMBER; + hash = (53 * hash) + getFullShapeList().hashCode(); + } + if (getVarOffsetCount() > 0) { + hash = (37 * hash) + 
VAR_OFFSET_FIELD_NUMBER; + hash = (53 * hash) + getVarOffsetList().hashCode(); + } + if (getVarShapeCount() > 0) { + hash = (37 * hash) + VAR_SHAPE_FIELD_NUMBER; + hash = (53 * hash) + getVarShapeList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.SaveSliceInfoDef parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.SaveSliceInfoDef prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.SaveSliceInfoDef} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.SaveSliceInfoDef) + org.tensorflow.framework.SaveSliceInfoDefOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.VariableProtos.internal_static_tensorflow_SaveSliceInfoDef_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.VariableProtos.internal_static_tensorflow_SaveSliceInfoDef_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.SaveSliceInfoDef.class, org.tensorflow.framework.SaveSliceInfoDef.Builder.class); + } + + // Construct using org.tensorflow.framework.SaveSliceInfoDef.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + fullName_ = ""; + + fullShape_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + varOffset_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + varShape_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.tensorflow.framework.VariableProtos.internal_static_tensorflow_SaveSliceInfoDef_descriptor; + } + + public org.tensorflow.framework.SaveSliceInfoDef getDefaultInstanceForType() { + return org.tensorflow.framework.SaveSliceInfoDef.getDefaultInstance(); + } + + public org.tensorflow.framework.SaveSliceInfoDef build() { + org.tensorflow.framework.SaveSliceInfoDef result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.SaveSliceInfoDef buildPartial() { + org.tensorflow.framework.SaveSliceInfoDef result = new org.tensorflow.framework.SaveSliceInfoDef(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.fullName_ = fullName_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + fullShape_ = java.util.Collections.unmodifiableList(fullShape_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.fullShape_ = fullShape_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + varOffset_ = java.util.Collections.unmodifiableList(varOffset_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.varOffset_ = varOffset_; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + varShape_ = java.util.Collections.unmodifiableList(varShape_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.varShape_ = varShape_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.SaveSliceInfoDef) { + return mergeFrom((org.tensorflow.framework.SaveSliceInfoDef)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.SaveSliceInfoDef other) { + if (other == org.tensorflow.framework.SaveSliceInfoDef.getDefaultInstance()) return this; + if (!other.getFullName().isEmpty()) { + fullName_ = other.fullName_; + onChanged(); + } + if (!other.fullShape_.isEmpty()) { + if (fullShape_.isEmpty()) { + fullShape_ = other.fullShape_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFullShapeIsMutable(); + fullShape_.addAll(other.fullShape_); + } + onChanged(); + } + if (!other.varOffset_.isEmpty()) { + if (varOffset_.isEmpty()) { + varOffset_ = other.varOffset_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureVarOffsetIsMutable(); + varOffset_.addAll(other.varOffset_); + } + onChanged(); + } + if (!other.varShape_.isEmpty()) { + if (varShape_.isEmpty()) { + varShape_ = other.varShape_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureVarShapeIsMutable(); + varShape_.addAll(other.varShape_); + } + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.SaveSliceInfoDef parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.SaveSliceInfoDef) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object fullName_ = ""; + /** + *
+     * Name of the full variable of which this is a slice.
+     * 
+ * + * optional string full_name = 1; + */ + public java.lang.String getFullName() { + java.lang.Object ref = fullName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fullName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+     * Name of the full variable of which this is a slice.
+     * 
+ * + * optional string full_name = 1; + */ + public com.google.protobuf.ByteString + getFullNameBytes() { + java.lang.Object ref = fullName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fullName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+     * Name of the full variable of which this is a slice.
+     * 
+ * + * optional string full_name = 1; + */ + public Builder setFullName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + fullName_ = value; + onChanged(); + return this; + } + /** + *
+     * Name of the full variable of which this is a slice.
+     * 
+ * + * optional string full_name = 1; + */ + public Builder clearFullName() { + + fullName_ = getDefaultInstance().getFullName(); + onChanged(); + return this; + } + /** + *
+     * Name of the full variable of which this is a slice.
+     * 
+ * + * optional string full_name = 1; + */ + public Builder setFullNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + fullName_ = value; + onChanged(); + return this; + } + + private java.util.List fullShape_ = java.util.Collections.emptyList(); + private void ensureFullShapeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + fullShape_ = new java.util.ArrayList(fullShape_); + bitField0_ |= 0x00000002; + } + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public java.util.List + getFullShapeList() { + return java.util.Collections.unmodifiableList(fullShape_); + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public int getFullShapeCount() { + return fullShape_.size(); + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public long getFullShape(int index) { + return fullShape_.get(index); + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public Builder setFullShape( + int index, long value) { + ensureFullShapeIsMutable(); + fullShape_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public Builder addFullShape(long value) { + ensureFullShapeIsMutable(); + fullShape_.add(value); + onChanged(); + return this; + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public Builder addAllFullShape( + java.lang.Iterable values) { + ensureFullShapeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, fullShape_); + onChanged(); + return this; + } + /** + *
+     * Shape of the full variable.
+     * 
+ * + * repeated int64 full_shape = 2; + */ + public Builder clearFullShape() { + fullShape_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + private java.util.List varOffset_ = java.util.Collections.emptyList(); + private void ensureVarOffsetIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + varOffset_ = new java.util.ArrayList(varOffset_); + bitField0_ |= 0x00000004; + } + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public java.util.List + getVarOffsetList() { + return java.util.Collections.unmodifiableList(varOffset_); + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public int getVarOffsetCount() { + return varOffset_.size(); + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public long getVarOffset(int index) { + return varOffset_.get(index); + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public Builder setVarOffset( + int index, long value) { + ensureVarOffsetIsMutable(); + varOffset_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public Builder addVarOffset(long value) { + ensureVarOffsetIsMutable(); + varOffset_.add(value); + onChanged(); + return this; + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public Builder addAllVarOffset( + java.lang.Iterable values) { + ensureVarOffsetIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, varOffset_); + onChanged(); + return this; + } + /** + *
+     * Offset of this variable into the full variable.
+     * 
+ * + * repeated int64 var_offset = 3; + */ + public Builder clearVarOffset() { + varOffset_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + + private java.util.List varShape_ = java.util.Collections.emptyList(); + private void ensureVarShapeIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + varShape_ = new java.util.ArrayList(varShape_); + bitField0_ |= 0x00000008; + } + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public java.util.List + getVarShapeList() { + return java.util.Collections.unmodifiableList(varShape_); + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public int getVarShapeCount() { + return varShape_.size(); + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public long getVarShape(int index) { + return varShape_.get(index); + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public Builder setVarShape( + int index, long value) { + ensureVarShapeIsMutable(); + varShape_.set(index, value); + onChanged(); + return this; + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public Builder addVarShape(long value) { + ensureVarShapeIsMutable(); + varShape_.add(value); + onChanged(); + return this; + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public Builder addAllVarShape( + java.lang.Iterable values) { + ensureVarShapeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, varShape_); + onChanged(); + return this; + } + /** + *
+     * Shape of this variable.
+     * 
+ * + * repeated int64 var_shape = 4; + */ + public Builder clearVarShape() { + varShape_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.SaveSliceInfoDef) + } + + // @@protoc_insertion_point(class_scope:tensorflow.SaveSliceInfoDef) + private static final org.tensorflow.framework.SaveSliceInfoDef DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.SaveSliceInfoDef(); + } + + public static org.tensorflow.framework.SaveSliceInfoDef getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public SaveSliceInfoDef parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SaveSliceInfoDef(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.SaveSliceInfoDef getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDefOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDefOrBuilder.java new file mode 100644 index 0000000000000..6a3f299870fd5 --- /dev/null +++ 
b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/SaveSliceInfoDefOrBuilder.java @@ -0,0 +1,102 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/variable.proto + +package org.tensorflow.framework; + +public interface SaveSliceInfoDefOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.SaveSliceInfoDef) + com.google.protobuf.MessageOrBuilder { + + /** + *
+   * Name of the full variable of which this is a slice.
+   * 
+ * + * optional string full_name = 1; + */ + java.lang.String getFullName(); + /** + *
+   * Name of the full variable of which this is a slice.
+   * 
+ * + * optional string full_name = 1; + */ + com.google.protobuf.ByteString + getFullNameBytes(); + + /** + *
+   * Shape of the full variable.
+   * 
+ * + * repeated int64 full_shape = 2; + */ + java.util.List getFullShapeList(); + /** + *
+   * Shape of the full variable.
+   * 
+ * + * repeated int64 full_shape = 2; + */ + int getFullShapeCount(); + /** + *
+   * Shape of the full variable.
+   * 
+ * + * repeated int64 full_shape = 2; + */ + long getFullShape(int index); + + /** + *
+   * Offset of this variable into the full variable.
+   * 
+ * + * repeated int64 var_offset = 3; + */ + java.util.List getVarOffsetList(); + /** + *
+   * Offset of this variable into the full variable.
+   * 
+ * + * repeated int64 var_offset = 3; + */ + int getVarOffsetCount(); + /** + *
+   * Offset of this variable into the full variable.
+   * 
+ * + * repeated int64 var_offset = 3; + */ + long getVarOffset(int index); + + /** + *
+   * Shape of this variable.
+   * 
+ * + * repeated int64 var_shape = 4; + */ + java.util.List getVarShapeList(); + /** + *
+   * Shape of this variable.
+   * 
+ * + * repeated int64 var_shape = 4; + */ + int getVarShapeCount(); + /** + *
+   * Shape of this variable.
+   * 
+ * + * repeated int64 var_shape = 4; + */ + long getVarShape(int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStats.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStats.java new file mode 100644 index 0000000000000..80ef30e91f46a --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStats.java @@ -0,0 +1,712 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +/** + * Protobuf type {@code tensorflow.StepStats} + */ +public final class StepStats extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.StepStats) + StepStatsOrBuilder { + // Use StepStats.newBuilder() to construct. + private StepStats(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private StepStats() { + devStats_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private StepStats( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + devStats_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + devStats_.add( + input.readMessage(org.tensorflow.framework.DeviceStepStats.parser(), extensionRegistry)); + break; + } + } + } + } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + devStats_ = java.util.Collections.unmodifiableList(devStats_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_StepStats_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_StepStats_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.StepStats.class, org.tensorflow.framework.StepStats.Builder.class); + } + + public static final int DEV_STATS_FIELD_NUMBER = 1; + private java.util.List devStats_; + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public java.util.List getDevStatsList() { + return devStats_; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public java.util.List + getDevStatsOrBuilderList() { + return devStats_; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public int getDevStatsCount() { + return devStats_.size(); + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStats getDevStats(int index) { + return devStats_.get(index); + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStatsOrBuilder getDevStatsOrBuilder( + int index) { + return devStats_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return 
true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < devStats_.size(); i++) { + output.writeMessage(1, devStats_.get(i)); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < devStats_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, devStats_.get(i)); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.StepStats)) { + return super.equals(obj); + } + org.tensorflow.framework.StepStats other = (org.tensorflow.framework.StepStats) obj; + + boolean result = true; + result = result && getDevStatsList() + .equals(other.getDevStatsList()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getDevStatsCount() > 0) { + hash = (37 * hash) + DEV_STATS_FIELD_NUMBER; + hash = (53 * hash) + getDevStatsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.StepStats parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.StepStats parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.tensorflow.framework.StepStats parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.StepStats parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.StepStats parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.StepStats parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.StepStats parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.StepStats parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.StepStats parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.StepStats parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.StepStats prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.StepStats} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.StepStats) + org.tensorflow.framework.StepStatsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_StepStats_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_StepStats_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.StepStats.class, org.tensorflow.framework.StepStats.Builder.class); + } + + // Construct using org.tensorflow.framework.StepStats.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getDevStatsFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (devStatsBuilder_ == null) { + devStats_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + devStatsBuilder_.clear(); + } + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.StepStatsProtos.internal_static_tensorflow_StepStats_descriptor; + } + + public org.tensorflow.framework.StepStats getDefaultInstanceForType() { + return org.tensorflow.framework.StepStats.getDefaultInstance(); + } + + public org.tensorflow.framework.StepStats build() { + org.tensorflow.framework.StepStats result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.StepStats buildPartial() { + org.tensorflow.framework.StepStats result = new org.tensorflow.framework.StepStats(this); + int from_bitField0_ = bitField0_; + if (devStatsBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + devStats_ = java.util.Collections.unmodifiableList(devStats_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.devStats_ = devStats_; + } else { + result.devStats_ = devStatsBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object 
value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.StepStats) { + return mergeFrom((org.tensorflow.framework.StepStats)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.StepStats other) { + if (other == org.tensorflow.framework.StepStats.getDefaultInstance()) return this; + if (devStatsBuilder_ == null) { + if (!other.devStats_.isEmpty()) { + if (devStats_.isEmpty()) { + devStats_ = other.devStats_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureDevStatsIsMutable(); + devStats_.addAll(other.devStats_); + } + onChanged(); + } + } else { + if (!other.devStats_.isEmpty()) { + if (devStatsBuilder_.isEmpty()) { + devStatsBuilder_.dispose(); + devStatsBuilder_ = null; + devStats_ = other.devStats_; + bitField0_ = (bitField0_ & ~0x00000001); + devStatsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getDevStatsFieldBuilder() : null; + } else { + devStatsBuilder_.addAllMessages(other.devStats_); + } + } + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.StepStats parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.StepStats) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List devStats_ = + java.util.Collections.emptyList(); + private void ensureDevStatsIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + devStats_ = new java.util.ArrayList(devStats_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.DeviceStepStats, org.tensorflow.framework.DeviceStepStats.Builder, org.tensorflow.framework.DeviceStepStatsOrBuilder> devStatsBuilder_; + + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public java.util.List getDevStatsList() { + if (devStatsBuilder_ == null) { + return java.util.Collections.unmodifiableList(devStats_); + } else { + return devStatsBuilder_.getMessageList(); + } + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public int getDevStatsCount() { + if (devStatsBuilder_ == null) { + return devStats_.size(); + } else { + return devStatsBuilder_.getCount(); + } + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStats getDevStats(int index) { + if (devStatsBuilder_ == null) { + return devStats_.get(index); + } else 
{ + return devStatsBuilder_.getMessage(index); + } + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder setDevStats( + int index, org.tensorflow.framework.DeviceStepStats value) { + if (devStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDevStatsIsMutable(); + devStats_.set(index, value); + onChanged(); + } else { + devStatsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder setDevStats( + int index, org.tensorflow.framework.DeviceStepStats.Builder builderForValue) { + if (devStatsBuilder_ == null) { + ensureDevStatsIsMutable(); + devStats_.set(index, builderForValue.build()); + onChanged(); + } else { + devStatsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder addDevStats(org.tensorflow.framework.DeviceStepStats value) { + if (devStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDevStatsIsMutable(); + devStats_.add(value); + onChanged(); + } else { + devStatsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder addDevStats( + int index, org.tensorflow.framework.DeviceStepStats value) { + if (devStatsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDevStatsIsMutable(); + devStats_.add(index, value); + onChanged(); + } else { + devStatsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder addDevStats( + org.tensorflow.framework.DeviceStepStats.Builder builderForValue) { + if (devStatsBuilder_ == null) { + ensureDevStatsIsMutable(); + devStats_.add(builderForValue.build()); + onChanged(); + } else { + 
devStatsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder addDevStats( + int index, org.tensorflow.framework.DeviceStepStats.Builder builderForValue) { + if (devStatsBuilder_ == null) { + ensureDevStatsIsMutable(); + devStats_.add(index, builderForValue.build()); + onChanged(); + } else { + devStatsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder addAllDevStats( + java.lang.Iterable values) { + if (devStatsBuilder_ == null) { + ensureDevStatsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, devStats_); + onChanged(); + } else { + devStatsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder clearDevStats() { + if (devStatsBuilder_ == null) { + devStats_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + devStatsBuilder_.clear(); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public Builder removeDevStats(int index) { + if (devStatsBuilder_ == null) { + ensureDevStatsIsMutable(); + devStats_.remove(index); + onChanged(); + } else { + devStatsBuilder_.remove(index); + } + return this; + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStats.Builder getDevStatsBuilder( + int index) { + return getDevStatsFieldBuilder().getBuilder(index); + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStatsOrBuilder getDevStatsOrBuilder( + int index) { + if (devStatsBuilder_ == null) { + return devStats_.get(index); } else { + return devStatsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .tensorflow.DeviceStepStats 
dev_stats = 1; + */ + public java.util.List + getDevStatsOrBuilderList() { + if (devStatsBuilder_ != null) { + return devStatsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(devStats_); + } + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStats.Builder addDevStatsBuilder() { + return getDevStatsFieldBuilder().addBuilder( + org.tensorflow.framework.DeviceStepStats.getDefaultInstance()); + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public org.tensorflow.framework.DeviceStepStats.Builder addDevStatsBuilder( + int index) { + return getDevStatsFieldBuilder().addBuilder( + index, org.tensorflow.framework.DeviceStepStats.getDefaultInstance()); + } + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + public java.util.List + getDevStatsBuilderList() { + return getDevStatsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.DeviceStepStats, org.tensorflow.framework.DeviceStepStats.Builder, org.tensorflow.framework.DeviceStepStatsOrBuilder> + getDevStatsFieldBuilder() { + if (devStatsBuilder_ == null) { + devStatsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.tensorflow.framework.DeviceStepStats, org.tensorflow.framework.DeviceStepStats.Builder, org.tensorflow.framework.DeviceStepStatsOrBuilder>( + devStats_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + devStats_ = null; + } + return devStatsBuilder_; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.StepStats) + } + + // @@protoc_insertion_point(class_scope:tensorflow.StepStats) + private 
static final org.tensorflow.framework.StepStats DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.StepStats(); + } + + public static org.tensorflow.framework.StepStats getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public StepStats parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StepStats(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.StepStats getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsOrBuilder.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsOrBuilder.java new file mode 100644 index 0000000000000..59de9a5f4fed3 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsOrBuilder.java @@ -0,0 +1,33 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +public interface StepStatsOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.StepStats) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + java.util.List + getDevStatsList(); + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + org.tensorflow.framework.DeviceStepStats getDevStats(int index); + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + int getDevStatsCount(); + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + java.util.List + getDevStatsOrBuilderList(); + /** + * repeated .tensorflow.DeviceStepStats dev_stats = 1; + */ + org.tensorflow.framework.DeviceStepStatsOrBuilder getDevStatsOrBuilder( + int index); +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsProtos.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsProtos.java new file mode 100644 index 0000000000000..7d34682040651 --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/StepStatsProtos.java @@ -0,0 +1,124 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: tensorflow/core/framework/step_stats.proto + +package org.tensorflow.framework; + +public final class StepStatsProtos { + private StepStatsProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_AllocatorMemoryUsed_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_AllocatorMemoryUsed_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_NodeOutput_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_NodeOutput_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_NodeExecStats_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_NodeExecStats_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_DeviceStepStats_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_DeviceStepStats_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_tensorflow_StepStats_descriptor; + static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_tensorflow_StepStats_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n*tensorflow/core/framework/step_stats.p" + + 
"roto\022\ntensorflow\0326tensorflow/core/framew" + + "ork/allocation_description.proto\0322tensor" + + "flow/core/framework/tensor_description.p" + + "roto\"V\n\023AllocatorMemoryUsed\022\026\n\016allocator" + + "_name\030\001 \001(\t\022\023\n\013total_bytes\030\002 \001(\003\022\022\n\npeak" + + "_bytes\030\003 \001(\003\"U\n\nNodeOutput\022\014\n\004slot\030\001 \001(\005" + + "\0229\n\022tensor_description\030\003 \001(\0132\035.tensorflo" + + "w.TensorDescription\"\354\002\n\rNodeExecStats\022\021\n" + + "\tnode_name\030\001 \001(\t\022\030\n\020all_start_micros\030\002 \001", + "(\003\022\033\n\023op_start_rel_micros\030\003 \001(\003\022\031\n\021op_en" + + "d_rel_micros\030\004 \001(\003\022\032\n\022all_end_rel_micros" + + "\030\005 \001(\003\022/\n\006memory\030\006 \003(\0132\037.tensorflow.Allo" + + "catorMemoryUsed\022&\n\006output\030\007 \003(\0132\026.tensor" + + "flow.NodeOutput\022\026\n\016timeline_label\030\010 \001(\t\022" + + "\030\n\020scheduled_micros\030\t \001(\003\022\021\n\tthread_id\030\n" + + " \001(\r\022<\n\021referenced_tensor\030\013 \003(\0132!.tensor" + + "flow.AllocationDescription\"P\n\017DeviceStep" + + "Stats\022\016\n\006device\030\001 \001(\t\022-\n\nnode_stats\030\002 \003(" + + "\0132\031.tensorflow.NodeExecStats\";\n\tStepStat", + "s\022.\n\tdev_stats\030\001 \003(\0132\033.tensorflow.Device" + + "StepStatsB0\n\030org.tensorflow.frameworkB\017S" + + "tepStatsProtosP\001\370\001\001b\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.tensorflow.framework.AllocationDescriptionProtos.getDescriptor(), + org.tensorflow.framework.TensorDescriptionProtos.getDescriptor(), + }, assigner); + internal_static_tensorflow_AllocatorMemoryUsed_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_tensorflow_AllocatorMemoryUsed_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_AllocatorMemoryUsed_descriptor, + new java.lang.String[] { "AllocatorName", "TotalBytes", "PeakBytes", }); + internal_static_tensorflow_NodeOutput_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_tensorflow_NodeOutput_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_NodeOutput_descriptor, + new java.lang.String[] { "Slot", "TensorDescription", }); + internal_static_tensorflow_NodeExecStats_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_tensorflow_NodeExecStats_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_NodeExecStats_descriptor, + new java.lang.String[] { "NodeName", "AllStartMicros", "OpStartRelMicros", "OpEndRelMicros", "AllEndRelMicros", "Memory", "Output", "TimelineLabel", "ScheduledMicros", "ThreadId", "ReferencedTensor", }); + internal_static_tensorflow_DeviceStepStats_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_tensorflow_DeviceStepStats_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_tensorflow_DeviceStepStats_descriptor, + new java.lang.String[] { "Device", "NodeStats", }); + internal_static_tensorflow_StepStats_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_tensorflow_StepStats_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_tensorflow_StepStats_descriptor, + new java.lang.String[] { "DevStats", }); + org.tensorflow.framework.AllocationDescriptionProtos.getDescriptor(); + org.tensorflow.framework.TensorDescriptionProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/Summary.java b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/Summary.java new file mode 100644 index 0000000000000..37468f7b6720e --- /dev/null +++ b/hadoop-deeplearning-project/tensorflow-bridge/src/main/java/org/tensorflow/framework/Summary.java @@ -0,0 +1,4211 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: tensorflow/core/framework/summary.proto + +package org.tensorflow.framework; + +/** + *
+ * A Summary is a set of named values to be displayed by the
+ * visualizer.
+ * Summaries are produced regularly during training, as controlled by
+ * the "summary_interval_secs" attribute of the training operation.
+ * Summaries are also produced at the end of an evaluation.
+ * 
+ * + * Protobuf type {@code tensorflow.Summary} + */ +public final class Summary extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.Summary) + SummaryOrBuilder { + // Use Summary.newBuilder() to construct. + private Summary(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Summary() { + value_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Summary( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + value_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + value_.add( + input.readMessage(org.tensorflow.framework.Summary.Value.parser(), extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + value_ = java.util.Collections.unmodifiableList(value_); + } + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.Summary.class, org.tensorflow.framework.Summary.Builder.class); + } + + public interface ImageOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.Summary.Image) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Dimensions of the image.
+     * 
+ * + * optional int32 height = 1; + */ + int getHeight(); + + /** + * optional int32 width = 2; + */ + int getWidth(); + + /** + *
+     * Valid colorspace values are
+     *   1 - grayscale
+     *   2 - grayscale + alpha
+     *   3 - RGB
+     *   4 - RGBA
+     *   5 - DIGITAL_YUV
+     *   6 - BGRA
+     * 
+ * + * optional int32 colorspace = 3; + */ + int getColorspace(); + + /** + *
+     * Image data in encoded format.  All image formats supported by
+     * image_codec::CoderUtil can be stored here.
+     * 
+ * + * optional bytes encoded_image_string = 4; + */ + com.google.protobuf.ByteString getEncodedImageString(); + } + /** + * Protobuf type {@code tensorflow.Summary.Image} + */ + public static final class Image extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.Summary.Image) + ImageOrBuilder { + // Use Image.newBuilder() to construct. + private Image(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Image() { + height_ = 0; + width_ = 0; + colorspace_ = 0; + encodedImageString_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Image( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 8: { + + height_ = input.readInt32(); + break; + } + case 16: { + + width_ = input.readInt32(); + break; + } + case 24: { + + colorspace_ = input.readInt32(); + break; + } + case 34: { + + encodedImageString_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_descriptor; + } + + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.Summary.Image.class, org.tensorflow.framework.Summary.Image.Builder.class); + } + + public static final int HEIGHT_FIELD_NUMBER = 1; + private int height_; + /** + *
+     * Dimensions of the image.
+     * 
+ * + * optional int32 height = 1; + */ + public int getHeight() { + return height_; + } + + public static final int WIDTH_FIELD_NUMBER = 2; + private int width_; + /** + * optional int32 width = 2; + */ + public int getWidth() { + return width_; + } + + public static final int COLORSPACE_FIELD_NUMBER = 3; + private int colorspace_; + /** + *
+     * Valid colorspace values are
+     *   1 - grayscale
+     *   2 - grayscale + alpha
+     *   3 - RGB
+     *   4 - RGBA
+     *   5 - DIGITAL_YUV
+     *   6 - BGRA
+     * 
+ * + * optional int32 colorspace = 3; + */ + public int getColorspace() { + return colorspace_; + } + + public static final int ENCODED_IMAGE_STRING_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString encodedImageString_; + /** + *
+     * Image data in encoded format.  All image formats supported by
+     * image_codec::CoderUtil can be stored here.
+     * 
+ * + * optional bytes encoded_image_string = 4; + */ + public com.google.protobuf.ByteString getEncodedImageString() { + return encodedImageString_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (height_ != 0) { + output.writeInt32(1, height_); + } + if (width_ != 0) { + output.writeInt32(2, width_); + } + if (colorspace_ != 0) { + output.writeInt32(3, colorspace_); + } + if (!encodedImageString_.isEmpty()) { + output.writeBytes(4, encodedImageString_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (height_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, height_); + } + if (width_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, width_); + } + if (colorspace_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, colorspace_); + } + if (!encodedImageString_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, encodedImageString_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.Summary.Image)) { + return super.equals(obj); + } + org.tensorflow.framework.Summary.Image other = (org.tensorflow.framework.Summary.Image) obj; + + boolean result = true; + result = result && (getHeight() + == other.getHeight()); + result = result && (getWidth() + == other.getWidth()); + result = result && (getColorspace() + == other.getColorspace()); + result = result && 
getEncodedImageString() + .equals(other.getEncodedImageString()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + HEIGHT_FIELD_NUMBER; + hash = (53 * hash) + getHeight(); + hash = (37 * hash) + WIDTH_FIELD_NUMBER; + hash = (53 * hash) + getWidth(); + hash = (37 * hash) + COLORSPACE_FIELD_NUMBER; + hash = (53 * hash) + getColorspace(); + hash = (37 * hash) + ENCODED_IMAGE_STRING_FIELD_NUMBER; + hash = (53 * hash) + getEncodedImageString().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.Summary.Image parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.Summary.Image parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Image parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.Summary.Image parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Image parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.Summary.Image parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Image parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.Summary.Image parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Image parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.Summary.Image parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.Summary.Image prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.Summary.Image} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.Summary.Image) + org.tensorflow.framework.Summary.ImageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.Summary.Image.class, org.tensorflow.framework.Summary.Image.Builder.class); + } + + // Construct using org.tensorflow.framework.Summary.Image.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + height_ = 0; + + width_ = 0; + + colorspace_ = 0; + + encodedImageString_ = com.google.protobuf.ByteString.EMPTY; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Image_descriptor; + } + + public org.tensorflow.framework.Summary.Image getDefaultInstanceForType() { + return 
org.tensorflow.framework.Summary.Image.getDefaultInstance(); + } + + public org.tensorflow.framework.Summary.Image build() { + org.tensorflow.framework.Summary.Image result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.Summary.Image buildPartial() { + org.tensorflow.framework.Summary.Image result = new org.tensorflow.framework.Summary.Image(this); + result.height_ = height_; + result.width_ = width_; + result.colorspace_ = colorspace_; + result.encodedImageString_ = encodedImageString_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.Summary.Image) { + return mergeFrom((org.tensorflow.framework.Summary.Image)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.Summary.Image other) { + if (other == org.tensorflow.framework.Summary.Image.getDefaultInstance()) return this; + if (other.getHeight() != 0) { + setHeight(other.getHeight()); + } + if 
(other.getWidth() != 0) { + setWidth(other.getWidth()); + } + if (other.getColorspace() != 0) { + setColorspace(other.getColorspace()); + } + if (other.getEncodedImageString() != com.google.protobuf.ByteString.EMPTY) { + setEncodedImageString(other.getEncodedImageString()); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.Summary.Image parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.Summary.Image) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int height_ ; + /** + *
+       * Dimensions of the image.
+       * 
+ * + * optional int32 height = 1; + */ + public int getHeight() { + return height_; + } + /** + *
+       * Dimensions of the image.
+       * 
+ * + * optional int32 height = 1; + */ + public Builder setHeight(int value) { + + height_ = value; + onChanged(); + return this; + } + /** + *
+       * Dimensions of the image.
+       * 
+ * + * optional int32 height = 1; + */ + public Builder clearHeight() { + + height_ = 0; + onChanged(); + return this; + } + + private int width_ ; + /** + * optional int32 width = 2; + */ + public int getWidth() { + return width_; + } + /** + * optional int32 width = 2; + */ + public Builder setWidth(int value) { + + width_ = value; + onChanged(); + return this; + } + /** + * optional int32 width = 2; + */ + public Builder clearWidth() { + + width_ = 0; + onChanged(); + return this; + } + + private int colorspace_ ; + /** + *
+       * Valid colorspace values are
+       *   1 - grayscale
+       *   2 - grayscale + alpha
+       *   3 - RGB
+       *   4 - RGBA
+       *   5 - DIGITAL_YUV
+       *   6 - BGRA
+       * 
+ * + * optional int32 colorspace = 3; + */ + public int getColorspace() { + return colorspace_; + } + /** + *
+       * Valid colorspace values are
+       *   1 - grayscale
+       *   2 - grayscale + alpha
+       *   3 - RGB
+       *   4 - RGBA
+       *   5 - DIGITAL_YUV
+       *   6 - BGRA
+       * 
+ * + * optional int32 colorspace = 3; + */ + public Builder setColorspace(int value) { + + colorspace_ = value; + onChanged(); + return this; + } + /** + *
+       * Valid colorspace values are
+       *   1 - grayscale
+       *   2 - grayscale + alpha
+       *   3 - RGB
+       *   4 - RGBA
+       *   5 - DIGITAL_YUV
+       *   6 - BGRA
+       * 
+ * + * optional int32 colorspace = 3; + */ + public Builder clearColorspace() { + + colorspace_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString encodedImageString_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Image data in encoded format.  All image formats supported by
+       * image_codec::CoderUtil can be stored here.
+       * 
+ * + * optional bytes encoded_image_string = 4; + */ + public com.google.protobuf.ByteString getEncodedImageString() { + return encodedImageString_; + } + /** + *
+       * Image data in encoded format.  All image formats supported by
+       * image_codec::CoderUtil can be stored here.
+       * 
+ * + * optional bytes encoded_image_string = 4; + */ + public Builder setEncodedImageString(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + + encodedImageString_ = value; + onChanged(); + return this; + } + /** + *
+       * Image data in encoded format.  All image formats supported by
+       * image_codec::CoderUtil can be stored here.
+       * 
+ * + * optional bytes encoded_image_string = 4; + */ + public Builder clearEncodedImageString() { + + encodedImageString_ = getDefaultInstance().getEncodedImageString(); + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // @@protoc_insertion_point(builder_scope:tensorflow.Summary.Image) + } + + // @@protoc_insertion_point(class_scope:tensorflow.Summary.Image) + private static final org.tensorflow.framework.Summary.Image DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.Summary.Image(); + } + + public static org.tensorflow.framework.Summary.Image getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + public Image parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Image(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public org.tensorflow.framework.Summary.Image getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface AudioOrBuilder extends + // @@protoc_insertion_point(interface_extends:tensorflow.Summary.Audio) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Sample rate of the audio in Hz.
+     * 
+ * + * optional float sample_rate = 1; + */ + float getSampleRate(); + + /** + *
+     * Number of channels of audio.
+     * 
+ * + * optional int64 num_channels = 2; + */ + long getNumChannels(); + + /** + *
+     * Length of the audio in frames (samples per channel).
+     * 
+ * + * optional int64 length_frames = 3; + */ + long getLengthFrames(); + + /** + *
+     * Encoded audio data and its associated RFC 2045 content type (e.g.
+     * "audio/wav").
+     * 
+ * + * optional bytes encoded_audio_string = 4; + */ + com.google.protobuf.ByteString getEncodedAudioString(); + + /** + * optional string content_type = 5; + */ + java.lang.String getContentType(); + /** + * optional string content_type = 5; + */ + com.google.protobuf.ByteString + getContentTypeBytes(); + } + /** + * Protobuf type {@code tensorflow.Summary.Audio} + */ + public static final class Audio extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:tensorflow.Summary.Audio) + AudioOrBuilder { + // Use Audio.newBuilder() to construct. + private Audio(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Audio() { + sampleRate_ = 0F; + numChannels_ = 0L; + lengthFrames_ = 0L; + encodedAudioString_ = com.google.protobuf.ByteString.EMPTY; + contentType_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); + } + private Audio( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!input.skipField(tag)) { + done = true; + } + break; + } + case 13: { + + sampleRate_ = input.readFloat(); + break; + } + case 16: { + + numChannels_ = input.readInt64(); + break; + } + case 24: { + + lengthFrames_ = input.readInt64(); + break; + } + case 34: { + + encodedAudioString_ = input.readBytes(); + break; + } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + + contentType_ = s; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.Summary.Audio.class, org.tensorflow.framework.Summary.Audio.Builder.class); + } + + public static final int SAMPLE_RATE_FIELD_NUMBER = 1; + private float sampleRate_; + /** + *
+     * Sample rate of the audio in Hz.
+     * 
+ * + * optional float sample_rate = 1; + */ + public float getSampleRate() { + return sampleRate_; + } + + public static final int NUM_CHANNELS_FIELD_NUMBER = 2; + private long numChannels_; + /** + *
+     * Number of channels of audio.
+     * 
+ * + * optional int64 num_channels = 2; + */ + public long getNumChannels() { + return numChannels_; + } + + public static final int LENGTH_FRAMES_FIELD_NUMBER = 3; + private long lengthFrames_; + /** + *
+     * Length of the audio in frames (samples per channel).
+     * 
+ * + * optional int64 length_frames = 3; + */ + public long getLengthFrames() { + return lengthFrames_; + } + + public static final int ENCODED_AUDIO_STRING_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString encodedAudioString_; + /** + *
+     * Encoded audio data and its associated RFC 2045 content type (e.g.
+     * "audio/wav").
+     * 
+ * + * optional bytes encoded_audio_string = 4; + */ + public com.google.protobuf.ByteString getEncodedAudioString() { + return encodedAudioString_; + } + + public static final int CONTENT_TYPE_FIELD_NUMBER = 5; + private volatile java.lang.Object contentType_; + /** + * optional string content_type = 5; + */ + public java.lang.String getContentType() { + java.lang.Object ref = contentType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + contentType_ = s; + return s; + } + } + /** + * optional string content_type = 5; + */ + public com.google.protobuf.ByteString + getContentTypeBytes() { + java.lang.Object ref = contentType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + contentType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (sampleRate_ != 0F) { + output.writeFloat(1, sampleRate_); + } + if (numChannels_ != 0L) { + output.writeInt64(2, numChannels_); + } + if (lengthFrames_ != 0L) { + output.writeInt64(3, lengthFrames_); + } + if (!encodedAudioString_.isEmpty()) { + output.writeBytes(4, encodedAudioString_); + } + if (!getContentTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, contentType_); + } + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (sampleRate_ != 0F) { + size += 
com.google.protobuf.CodedOutputStream + .computeFloatSize(1, sampleRate_); + } + if (numChannels_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, numChannels_); + } + if (lengthFrames_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, lengthFrames_); + } + if (!encodedAudioString_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, encodedAudioString_); + } + if (!getContentTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, contentType_); + } + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.tensorflow.framework.Summary.Audio)) { + return super.equals(obj); + } + org.tensorflow.framework.Summary.Audio other = (org.tensorflow.framework.Summary.Audio) obj; + + boolean result = true; + result = result && ( + java.lang.Float.floatToIntBits(getSampleRate()) + == java.lang.Float.floatToIntBits( + other.getSampleRate())); + result = result && (getNumChannels() + == other.getNumChannels()); + result = result && (getLengthFrames() + == other.getLengthFrames()); + result = result && getEncodedAudioString() + .equals(other.getEncodedAudioString()); + result = result && getContentType() + .equals(other.getContentType()); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (37 * hash) + SAMPLE_RATE_FIELD_NUMBER; + hash = (53 * hash) + java.lang.Float.floatToIntBits( + getSampleRate()); + hash = (37 * hash) + NUM_CHANNELS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getNumChannels()); + hash = (37 * hash) + LENGTH_FRAMES_FIELD_NUMBER; + hash = (53 * hash) + 
com.google.protobuf.Internal.hashLong( + getLengthFrames()); + hash = (37 * hash) + ENCODED_AUDIO_STRING_FIELD_NUMBER; + hash = (53 * hash) + getEncodedAudioString().hashCode(); + hash = (37 * hash) + CONTENT_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getContentType().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.tensorflow.framework.Summary.Audio parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.Summary.Audio parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Audio parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.tensorflow.framework.Summary.Audio parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Audio parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.Summary.Audio parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Audio parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.tensorflow.framework.Summary.Audio parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.tensorflow.framework.Summary.Audio parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.tensorflow.framework.Summary.Audio parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.tensorflow.framework.Summary.Audio prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code tensorflow.Summary.Audio} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:tensorflow.Summary.Audio) + org.tensorflow.framework.Summary.AudioOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_descriptor; + } + + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.tensorflow.framework.Summary.Audio.class, org.tensorflow.framework.Summary.Audio.Builder.class); + } + + // Construct using org.tensorflow.framework.Summary.Audio.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + public Builder clear() { + super.clear(); + sampleRate_ = 0F; + + numChannels_ = 0L; + + lengthFrames_ = 0L; + + encodedAudioString_ = com.google.protobuf.ByteString.EMPTY; + + contentType_ = ""; + + return this; + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.tensorflow.framework.SummaryProtos.internal_static_tensorflow_Summary_Audio_descriptor; + } + + public org.tensorflow.framework.Summary.Audio getDefaultInstanceForType() { + return 
org.tensorflow.framework.Summary.Audio.getDefaultInstance(); + } + + public org.tensorflow.framework.Summary.Audio build() { + org.tensorflow.framework.Summary.Audio result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.tensorflow.framework.Summary.Audio buildPartial() { + org.tensorflow.framework.Summary.Audio result = new org.tensorflow.framework.Summary.Audio(this); + result.sampleRate_ = sampleRate_; + result.numChannels_ = numChannels_; + result.lengthFrames_ = lengthFrames_; + result.encodedAudioString_ = encodedAudioString_; + result.contentType_ = contentType_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.tensorflow.framework.Summary.Audio) { + return mergeFrom((org.tensorflow.framework.Summary.Audio)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.tensorflow.framework.Summary.Audio other) { + if (other == org.tensorflow.framework.Summary.Audio.getDefaultInstance()) return this; + if 
(other.getSampleRate() != 0F) { + setSampleRate(other.getSampleRate()); + } + if (other.getNumChannels() != 0L) { + setNumChannels(other.getNumChannels()); + } + if (other.getLengthFrames() != 0L) { + setLengthFrames(other.getLengthFrames()); + } + if (other.getEncodedAudioString() != com.google.protobuf.ByteString.EMPTY) { + setEncodedAudioString(other.getEncodedAudioString()); + } + if (!other.getContentType().isEmpty()) { + contentType_ = other.contentType_; + onChanged(); + } + onChanged(); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.tensorflow.framework.Summary.Audio parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.tensorflow.framework.Summary.Audio) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private float sampleRate_ ; + /** + *
+       * Sample rate of the audio in Hz.
+       * 
+ * + * optional float sample_rate = 1; + */ + public float getSampleRate() { + return sampleRate_; + } + /** + *
+       * Sample rate of the audio in Hz.
+       * 
+ * + * optional float sample_rate = 1; + */ + public Builder setSampleRate(float value) { + + sampleRate_ = value; + onChanged(); + return this; + } + /** + *
+       * Sample rate of the audio in Hz.
+       * 
+ * + * optional float sample_rate = 1; + */ + public Builder clearSampleRate() { + + sampleRate_ = 0F; + onChanged(); + return this; + } + + private long numChannels_ ; + /** + *
+       * Number of channels of audio.
+       * 
+ * + * optional int64 num_channels = 2; + */ + public long getNumChannels() { + return numChannels_; + } + /** + *
+       * Number of channels of audio.
+       * 
+ * + * optional int64 num_channels = 2; + */ + public Builder setNumChannels(long value) { + + numChannels_ = value; + onChanged(); + return this; + } + /** + *
+       * Number of channels of audio.
+       * 
+ * + * optional int64 num_channels = 2; + */ + public Builder clearNumChannels() { + + numChannels_ = 0L; + onChanged(); + return this; + } + + private long lengthFrames_ ; + /** + *
+       * Length of the audio in frames (samples per channel).
+       * 
+ * + * optional int64 length_frames = 3; + */ + public long getLengthFrames() { + return lengthFrames_; + } + /** + *
+       * Length of the audio in frames (samples per channel).
+       * 
+ * + * optional int64 length_frames = 3; + */ + public Builder setLengthFrames(long value) { + + lengthFrames_ = value; + onChanged(); + return this; + } + /** + *
+       * Length of the audio in frames (samples per channel).
+       * 
+ * + * optional int64 length_frames = 3; + */ + public Builder clearLengthFrames() { + + lengthFrames_ = 0L; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString encodedAudioString_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Encoded audio data and its associated RFC 2045 content type (e.g.
+       * "audio/wav").
+       * 
+ * + * optional bytes encoded_audio_string = 4; + */ + public com.google.protobuf.ByteString getEncodedAudioString() { + return encodedAudioString_; + } + /** + *
+       * Encoded audio data and its associated RFC 2045 content type (e.g.
+       * "audio/wav").
+       * 
+ * + * optional bytes encoded_audio_string = 4; + */ + public Builder setEncodedAudioString(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + + encodedAudioString_ = value; + onChanged(); + return this; + } + /** + *
+       * Encoded audio data and its associated RFC 2045 content type (e.g.
+       * "audio/wav").
+       * 
+ * + * optional bytes encoded_audio_string = 4; + */ + public Builder clearEncodedAudioString() { + + encodedAudioString_ = getDefaultInstance().getEncodedAudioString(); + onChanged(); + return this; + } + + private java.lang.Object contentType_ = ""; + /** + * optional string content_type = 5; + */ + public java.lang.String getContentType() { + java.lang.Object ref = contentType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + contentType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string content_type = 5; + */ + public com.google.protobuf.ByteString + getContentTypeBytes() { + java.lang.Object ref = contentType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + contentType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string content_type = 5; + */ + public Builder setContentType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + contentType_ = value; + onChanged(); + return this; + } + /** + * optional string content_type = 5; + */ + public Builder clearContentType() { + + contentType_ = getDefaultInstance().getContentType(); + onChanged(); + return this; + } + /** + * optional string content_type = 5; + */ + public Builder setContentTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + contentType_ = value; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return this; + } + + + // 
@@protoc_insertion_point(builder_scope:tensorflow.Summary.Audio) + } + + // @@protoc_insertion_point(class_scope:tensorflow.Summary.Audio) + private static final org.tensorflow.framework.Summary.Audio DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.tensorflow.framework.Summary.Audio(); + } + + public static org.tensorflow.framework.Summary.Audio getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser