76345: import: default to writing at ingest time r=dt a=dt

Release note: none.
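
For context, the behavior is governed by the `bulkio.import_at_current_time.enabled` cluster setting, whose default flips to true in the pkg/ccl/importccl/import_processor.go diff below. A minimal sketch of inspecting or reverting it from a SQL session (illustrative only, not part of this commit):

```sql
-- Check the new default (true after this change): IMPORT writes each
-- flushed batch at the current timestamp rather than the job's fixed
-- walltime.
SHOW CLUSTER SETTING bulkio.import_at_current_time.enabled;

-- Opt back into the previous behavior of writing at the fixed import
-- timestamp captured at planning time.
SET CLUSTER SETTING bulkio.import_at_current_time.enabled = false;
```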

76368: build: turn off metamorphic constants for more gen rules r=ajwerner a=stevendanna

This adds `COCKROACH_INTERNAL_DISABLE_METAMORPHIC_TESTING=true` to more
tool invocations to cut down on log spam.

Informs cockroachdb#76363

Release note: None

76416: backupccl: rename incremental_storage option to incremental_location r=dt a=msbutler

Waiting to refactor internal variables until cockroachdb#75970 merges.

Release note (sql change): renamed the BACKUP, SHOW BACKUP, and RESTORE
incremental_storage option to incremental_location.
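
For illustration, a minimal sketch of the renamed option in use; the storage URIs are hypothetical placeholders, and the statement shapes mirror the tests touched by this commit:

```sql
-- Write an incremental backup chained to the latest full backup in the
-- collection, but store it under a separate incremental location.
BACKUP DATABASE data INTO LATEST IN 'nodelocal://1/full'
    WITH incremental_location = 'nodelocal://1/inc';

-- Inspect and restore the chain, pointing at the same incremental location.
SHOW BACKUP LATEST IN 'nodelocal://1/full'
    WITH incremental_location = 'nodelocal://1/inc';

RESTORE DATABASE data FROM LATEST IN 'nodelocal://1/full'
    WITH incremental_location = 'nodelocal://1/inc';
```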

Co-authored-by: David Taylor <[email protected]>
Co-authored-by: Steven Danna <[email protected]>
Co-authored-by: Michael Butler <[email protected]>
4 people committed Feb 11, 2022
4 parents fb98d7b + 5bbda1c + 8a30fac + 6a2c3fa commit eddc35f
Showing 13 changed files with 63 additions and 60 deletions.
6 changes: 3 additions & 3 deletions docs/generated/sql/bnf/stmt_block.bnf
@@ -969,7 +969,7 @@ unreserved_keyword ::=
| 'INCLUDING'
| 'INCREMENT'
| 'INCREMENTAL'
| 'INCREMENTAL_STORAGE'
| 'INCREMENTAL_LOCATION'
| 'INDEXES'
| 'INHERITS'
| 'INJECT'
@@ -1894,7 +1894,7 @@ backup_options ::=
| 'REVISION_HISTORY'
| 'DETACHED'
| 'KMS' '=' string_or_placeholder_opt_list
| 'INCREMENTAL_STORAGE' '=' string_or_placeholder_opt_list
| 'INCREMENTAL_LOCATION' '=' string_or_placeholder_opt_list

c_expr ::=
d_expr
@@ -2138,7 +2138,7 @@ restore_options ::=
| 'SKIP_LOCALITIES_CHECK'
| 'DEBUG_PAUSE_ON' '=' string_or_placeholder
| 'NEW_DB_NAME' '=' string_or_placeholder
| 'INCREMENTAL_STORAGE' '=' string_or_placeholder_opt_list
| 'INCREMENTAL_LOCATION' '=' string_or_placeholder_opt_list

scrub_option_list ::=
( scrub_option ) ( ( ',' scrub_option ) )*
12 changes: 6 additions & 6 deletions pkg/ccl/backupccl/backup_destination_test.go
@@ -278,9 +278,9 @@ func TestBackupRestoreResolveDestination(t *testing.T) {
// - BACKUP INTO collection
// - BACKUP INTO LATEST IN collection
// - BACKUP INTO full1 IN collection
// - BACKUP INTO full1 IN collection, incremental_storage = inc_storage_path
// - BACKUP INTO full1 IN collection, incremental_storage = inc_storage_path
// - BACKUP INTO LATEST IN collection, incremental_storage = inc_storage_path
// - BACKUP INTO full1 IN collection, incremental_location = inc_storage_path
// - BACKUP INTO full1 IN collection, incremental_location = inc_storage_path
// - BACKUP INTO LATEST IN collection, incremental_location = inc_storage_path
t.Run("collection", func(t *testing.T) {
collectionLoc := fmt.Sprintf("nodelocal://1/%s/?AUTH=implicit", t.Name())
collectionTo := localizeURI(t, collectionLoc, localities)
@@ -442,7 +442,7 @@ func TestBackupRestoreResolveDestination(t *testing.T) {
writeManifest(t, expectedDefault)
}

// A remote incremental into the first full: BACKUP INTO full1 IN collection, incremental_storage = inc_storage_path
// A remote incremental into the first full: BACKUP INTO full1 IN collection, incremental_location = inc_storage_path
{
expectedSuffix := "/2020/12/25-060000.00"
expectedIncDir := "/20201225/090000.00"
@@ -460,7 +460,7 @@ func TestBackupRestoreResolveDestination(t *testing.T) {
firstRemoteBackupChain = append(firstRemoteBackupChain, expectedDefault)
}

// Another remote incremental into the first full: BACKUP INTO full1 IN collection, incremental_storage = inc_storage_path
// Another remote incremental into the first full: BACKUP INTO full1 IN collection, incremental_location = inc_storage_path
{
expectedSuffix := "/2020/12/25-060000.00"
expectedIncDir := "/20201225/093000.00"
@@ -477,7 +477,7 @@ func TestBackupRestoreResolveDestination(t *testing.T) {
}

// A remote incremental into the second full backup: BACKUP INTO LATEST IN collection,
//incremental_storage = inc_storage_path
//incremental_location = inc_storage_path
{
expectedSuffix := "/2020/12/25-073000.00"
expectedIncDir := "/20201225/100000.00"
4 changes: 2 additions & 2 deletions pkg/ccl/backupccl/backup_planning.go
@@ -63,7 +63,7 @@ const (
backupOptWithPrivileges = "privileges"
backupOptAsJSON = "as_json"
backupOptWithDebugIDs = "debug_ids"
backupOptIncStorage = "incremental_storage"
backupOptIncStorage = "incremental_location"
localityURLParam = "COCKROACH_LOCALITY"
defaultLocalityValue = "default"
)
@@ -678,7 +678,7 @@ func backupPlanHook(
return err
}
if !backupStmt.Nested && len(incrementalStorage) > 0 {
return errors.New("incremental_storage option not supported with `BACKUP TO` syntax")
return errors.New("incremental_location option not supported with `BACKUP TO` syntax")
}

endTime := p.ExecCfg().Clock.Now()
14 changes: 7 additions & 7 deletions pkg/ccl/backupccl/backup_test.go
@@ -906,7 +906,7 @@ func TestBackupAndRestoreJobDescription(t *testing.T) {
sqlDB.Exec(t, "BACKUP INTO ($1, $2, $3)", collections...)
sqlDB.Exec(t, "BACKUP INTO LATEST IN ($1, $2, $3)", collections...)
sqlDB.Exec(t, "BACKUP INTO $4 IN ($1, $2, $3)", append(collections, "subdir")...)
sqlDB.Exec(t, "BACKUP INTO LATEST IN $4 WITH incremental_storage = ($1, $2, $3)",
sqlDB.Exec(t, "BACKUP INTO LATEST IN $4 WITH incremental_location = ($1, $2, $3)",
append(incrementals, collections[0])...)

// Find the subdirectory created by the full BACKUP INTO statement.
@@ -928,7 +928,7 @@ func TestBackupAndRestoreJobDescription(t *testing.T) {
collections[0], collections[1], collections[2])},
{fmt.Sprintf("BACKUP INTO '%s' IN ('%s', '%s', '%s')", "/subdir",
collections[0], collections[1], collections[2])},
{fmt.Sprintf("BACKUP INTO '%s' IN '%s' WITH incremental_storage = ('%s', '%s', '%s')",
{fmt.Sprintf("BACKUP INTO '%s' IN '%s' WITH incremental_location = ('%s', '%s', '%s')",
"/subdir", collections[0], incrementals[0],
incrementals[1], incrementals[2])},
},
@@ -948,7 +948,7 @@ func TestBackupAndRestoreJobDescription(t *testing.T) {

sqlDB.Exec(t, "DROP DATABASE data CASCADE")
sqlDB.Exec(t, "RESTORE DATABASE data FROM LATEST IN ($1, $2, "+
"$3) WITH incremental_storage = ($4, $5, $6)",
"$3) WITH incremental_location = ($4, $5, $6)",
append(collections, incrementals[0], incrementals[1], incrementals[2])...)

// The flavors of BACKUP and RESTORE which automatically resolve the right
@@ -985,7 +985,7 @@ func TestBackupAndRestoreJobDescription(t *testing.T) {
{fmt.Sprintf("RESTORE DATABASE data FROM ('%s', '%s', '%s')",
resolvedSubdirURIs[0], resolvedSubdirURIs[1],
resolvedSubdirURIs[2])},
{fmt.Sprintf("RESTORE DATABASE data FROM ('%s', '%s', '%s') WITH incremental_storage = ('%s', '%s', '%s')",
{fmt.Sprintf("RESTORE DATABASE data FROM ('%s', '%s', '%s') WITH incremental_location = ('%s', '%s', '%s')",
resolvedSubdirURIs[0], resolvedSubdirURIs[1], resolvedSubdirURIs[2],
resolvedIncURIs[0], resolvedIncURIs[1], resolvedIncURIs[2])},
},
@@ -9171,7 +9171,7 @@ func TestBackupMemMonitorSSTSinkQueueSize(t *testing.T) {
}

// TestBackupRestoreSeparateIncrementalPrefix tests that a backup/restore round
// trip using the 'incremental_storage' parameter restores the same db as a BR
// trip using the 'incremental_location' parameter restores the same db as a BR
// round trip without the parameter.
func TestBackupRestoreSeparateIncrementalPrefix(t *testing.T) {
defer leaktest.AfterTest(t)()
@@ -9222,9 +9222,9 @@ func TestBackupRestoreSeparateIncrementalPrefix(t *testing.T) {

sqlDB.Exec(t, `INSERT INTO fkdb.fk (ind) VALUES ($1)`, 200)

sib := fmt.Sprintf("BACKUP DATABASE fkdb INTO LATEST IN %s WITH incremental_storage = %s", dest, inc)
sib := fmt.Sprintf("BACKUP DATABASE fkdb INTO LATEST IN %s WITH incremental_location = %s", dest, inc)
sqlDB.Exec(t, sib)
sir := fmt.Sprintf("RESTORE DATABASE fkdb FROM LATEST IN %s WITH new_db_name = 'inc_fkdb', incremental_storage = %s", dest, inc)
sir := fmt.Sprintf("RESTORE DATABASE fkdb FROM LATEST IN %s WITH new_db_name = 'inc_fkdb', incremental_location = %s", dest, inc)
sqlDB.Exec(t, sir)

ib := fmt.Sprintf("BACKUP DATABASE fkdb INTO LATEST IN %s", dest)
6 changes: 3 additions & 3 deletions pkg/ccl/backupccl/create_scheduled_backup_test.go
@@ -421,13 +421,13 @@ func TestSerializesScheduledBackupExecutionArgs(t *testing.T) {
},
},
{
name: "full-cluster-remote-incremental-storage",
query: "CREATE SCHEDULE FOR BACKUP INTO 'nodelocal://0/backup' WITH incremental_storage = 'nodelocal://1/incremental' RECURRING '@hourly'",
name: "full-cluster-remote-incremental-location",
query: "CREATE SCHEDULE FOR BACKUP INTO 'nodelocal://0/backup' WITH incremental_location = 'nodelocal://1/incremental' RECURRING '@hourly'",
user: enterpriseUser,
expectedSchedules: []expectedSchedule{
{
nameRe: "BACKUP .*",
backupStmt: "BACKUP INTO LATEST IN 'nodelocal://0/backup' WITH detached, incremental_storage = 'nodelocal://1/incremental'",
backupStmt: "BACKUP INTO LATEST IN 'nodelocal://0/backup' WITH detached, incremental_location = 'nodelocal://1/incremental'",
period: time.Hour,
paused: true,
},
4 changes: 2 additions & 2 deletions pkg/ccl/backupccl/restore_planning.go
@@ -1522,7 +1522,7 @@ func restorePlanHook(
var incStorageFn func() ([]string, error)
if restoreStmt.Options.IncrementalStorage != nil {
if restoreStmt.Subdir == nil {
err = errors.New("incremental_storage can only be used with the following" +
err = errors.New("incremental_location can only be used with the following" +
" syntax: 'RESTORE [target] FROM [subdirectory] IN [destination]'")
return nil, nil, nil, false, err
}
@@ -1615,7 +1615,7 @@

// incFrom will contain the directory URIs for incremental backups (i.e.
// <prefix>/<subdir>) iff len(From)==1, regardless of the
// 'incremental_storage' param. len(From)=1 implies that the user has not
// 'incremental_location' param. len(From)=1 implies that the user has not
// explicitly passed incremental backups, so we'll have to look for any in
// <prefix>/<subdir>. len(incFrom)>1 implies the incremental backups are
// locality aware.
6 changes: 3 additions & 3 deletions pkg/ccl/backupccl/show_test.go
@@ -445,8 +445,8 @@ func TestShowBackups(t *testing.T) {
sqlDB.Exec(t, `BACKUP data.bank INTO $1`, full)
sqlDB.Exec(t, `BACKUP data.bank INTO LATEST IN $1`, full)
// Make 2 remote incremental backups, chaining to the third full backup
sqlDB.Exec(t, `BACKUP data.bank INTO LATEST IN $1 WITH incremental_storage = $2`, full, remoteInc)
sqlDB.Exec(t, `BACKUP data.bank INTO LATEST IN $1 WITH incremental_storage = $2`, full, remoteInc)
sqlDB.Exec(t, `BACKUP data.bank INTO LATEST IN $1 WITH incremental_location = $2`, full, remoteInc)
sqlDB.Exec(t, `BACKUP data.bank INTO LATEST IN $1 WITH incremental_location = $2`, full, remoteInc)

rows := sqlDBRestore.QueryStr(t, `SHOW BACKUPS IN $1`, full)

@@ -466,7 +466,7 @@ func TestShowBackups(t *testing.T) {

// check that full and remote incremental backups appear
b3 := sqlDBRestore.QueryStr(t,
`SELECT * FROM [SHOW BACKUP LATEST IN $1 WITH incremental_storage= 'nodelocal://0/foo/inc'] WHERE object_type='table'`, full)
`SELECT * FROM [SHOW BACKUP LATEST IN $1 WITH incremental_location= 'nodelocal://0/foo/inc'] WHERE object_type='table'`, full)
require.Equal(t, 3, len(b3))

}
18 changes: 9 additions & 9 deletions pkg/ccl/importccl/import_processor.go
@@ -32,6 +32,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/util/ctxgroup"
"github.com/cockroachdb/cockroach/pkg/util/errorutil/unimplemented"
"github.com/cockroachdb/cockroach/pkg/util/hlc"
"github.com/cockroachdb/cockroach/pkg/util/log"
"github.com/cockroachdb/cockroach/pkg/util/protoutil"
"github.com/cockroachdb/cockroach/pkg/util/syncutil"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
@@ -100,7 +101,7 @@ var importAtNow = settings.RegisterBoolSetting(
settings.TenantWritable,
"bulkio.import_at_current_time.enabled",
"write imported data at the current timestamp, when each batch is flushed",
false,
true,
)

// ImportBufferConfigSizes determines the minimum, maximum and step size for the
@@ -327,14 +328,13 @@ func ingestKvs(
defer span.Finish()

writeTS := hlc.Timestamp{WallTime: spec.WalltimeNanos}
writeAtRequestTime := false
if importAtNow.Get(&flowCtx.Cfg.Settings.SV) {
if !flowCtx.Cfg.Settings.Version.IsActive(ctx, clusterversion.MVCCAddSSTable) {
return nil, errors.Newf(
"cannot use %s until version %s", importAtNow.Key(), clusterversion.MVCCAddSSTable.String(),
)
}
writeAtRequestTime = true
writeAtRequestTime := true
if !importAtNow.Get(&flowCtx.Cfg.Settings.SV) {
log.Warningf(ctx, "ingesting import data with raw timestamps due to cluster setting")
writeAtRequestTime = false
} else if !flowCtx.Cfg.Settings.Version.IsActive(ctx, clusterversion.MVCCAddSSTable) {
log.Warningf(ctx, "ingesting import data with raw timestamps due to cluster version")
writeAtRequestTime = false
}

flushSize := func() int64 { return storageccl.MaxIngestBatchSize(flowCtx.Cfg.Settings) }
10 changes: 5 additions & 5 deletions pkg/sql/colexec/COLEXEC.bzl
@@ -5,10 +5,10 @@ def gen_sort_partitioner_rule(name, target, visibility = ["//visibility:private"
name = name,
srcs = ["//pkg/sql/colexec/colexecbase:distinct_tmpl.go"],
outs = [target],
cmd = """
$(location :execgen) -template $(SRCS) \
-fmt=false pkg/sql/colexec/$@ > $@
$(location :goimports) -w $@
""",
cmd = """\
export COCKROACH_INTERNAL_DISABLE_METAMORPHIC_TESTING=true
$(location :execgen) -template $(SRCS) -fmt=false pkg/sql/colexec/$@ > $@
$(location :goimports) -w $@
""",
tools = [":execgen", ":goimports"],
)
10 changes: 5 additions & 5 deletions pkg/sql/parser/sql.y
@@ -811,7 +811,7 @@ func (u *sqlSymUnion) setVar() *tree.SetVar {

%token <str> IDENTITY
%token <str> IF IFERROR IFNULL IGNORE_FOREIGN_KEYS ILIKE IMMEDIATE IMPORT IN INCLUDE
%token <str> INCLUDING INCREMENT INCREMENTAL INCREMENTAL_STORAGE
%token <str> INCLUDING INCREMENT INCREMENTAL INCREMENTAL_LOCATION
%token <str> INET INET_CONTAINED_BY_OR_EQUALS
%token <str> INET_CONTAINS_OR_EQUALS INDEX INDEXES INHERITS INJECT INITIALLY
%token <str> INNER INSERT INT INTEGER
@@ -2720,7 +2720,7 @@ opt_clear_data:
// encryption_passphrase="secret": encrypt backups
// kms="[kms_provider]://[kms_host]/[master_key_identifier]?[parameters]" : encrypt backups using KMS
// detached: execute backup job asynchronously, without waiting for its completion
// incremental_storage: specify a different path to store the incremental backup
// incremental_location: specify a different path to store the incremental backup
//
// %SeeAlso: RESTORE, WEBDOCS/backup.html
backup_stmt:
@@ -2826,7 +2826,7 @@ backup_options:
{
$$.val = &tree.BackupOptions{EncryptionKMSURI: $3.stringOrPlaceholderOptList()}
}
| INCREMENTAL_STORAGE '=' string_or_placeholder_opt_list
| INCREMENTAL_LOCATION '=' string_or_placeholder_opt_list
{
$$.val = &tree.BackupOptions{IncrementalStorage: $3.stringOrPlaceholderOptList()}
}
@@ -3159,7 +3159,7 @@ restore_options:
{
$$.val = &tree.RestoreOptions{NewDBName: $3.expr()}
}
| INCREMENTAL_STORAGE '=' string_or_placeholder_opt_list
| INCREMENTAL_LOCATION '=' string_or_placeholder_opt_list
{
$$.val = &tree.RestoreOptions{IncrementalStorage: $3.stringOrPlaceholderOptList()}
}
@@ -13537,7 +13537,7 @@ unreserved_keyword:
| INCLUDING
| INCREMENT
| INCREMENTAL
| INCREMENTAL_STORAGE
| INCREMENTAL_LOCATION
| INDEXES
| INHERITS
| INJECT
20 changes: 10 additions & 10 deletions pkg/sql/parser/testdata/backup_restore
@@ -47,12 +47,12 @@ BACKUP TABLE foo INTO LATEST IN '_' -- literals removed
BACKUP TABLE _ INTO LATEST IN 'bar' -- identifiers removed

parse
BACKUP TABLE foo INTO LATEST IN 'bar' WITH incremental_storage = 'baz'
BACKUP TABLE foo INTO LATEST IN 'bar' WITH incremental_location = 'baz'
----
BACKUP TABLE foo INTO LATEST IN 'bar' WITH incremental_storage = 'baz'
BACKUP TABLE (foo) INTO LATEST IN ('bar') WITH incremental_storage = ('baz') -- fully parenthesized
BACKUP TABLE foo INTO LATEST IN '_' WITH incremental_storage = '_' -- literals removed
BACKUP TABLE _ INTO LATEST IN 'bar' WITH incremental_storage = 'baz' -- identifiers removed
BACKUP TABLE foo INTO LATEST IN 'bar' WITH incremental_location = 'baz'
BACKUP TABLE (foo) INTO LATEST IN ('bar') WITH incremental_location = ('baz') -- fully parenthesized
BACKUP TABLE foo INTO LATEST IN '_' WITH incremental_location = '_' -- literals removed
BACKUP TABLE _ INTO LATEST IN 'bar' WITH incremental_location = 'baz' -- identifiers removed

parse
BACKUP TABLE foo INTO 'subdir' IN 'bar'
@@ -487,12 +487,12 @@
RESTORE DATABASE _ FROM 'bar' WITH new_db_name = 'baz' -- identifiers removed

parse
RESTORE DATABASE foo FROM 'bar' IN LATEST WITH incremental_storage = 'baz'
RESTORE DATABASE foo FROM 'bar' IN LATEST WITH incremental_location = 'baz'
----
RESTORE DATABASE foo FROM 'bar' IN 'latest' WITH incremental_storage = 'baz' -- normalized!
RESTORE DATABASE foo FROM ('bar') IN ('latest') WITH incremental_storage = ('baz') -- fully parenthesized
RESTORE DATABASE foo FROM '_' IN '_' WITH incremental_storage = '_' -- literals removed
RESTORE DATABASE _ FROM 'bar' IN 'latest' WITH incremental_storage = 'baz' -- identifiers removed
RESTORE DATABASE foo FROM 'bar' IN 'latest' WITH incremental_location = 'baz' -- normalized!
RESTORE DATABASE foo FROM ('bar') IN ('latest') WITH incremental_location = ('baz') -- fully parenthesized
RESTORE DATABASE foo FROM '_' IN '_' WITH incremental_location = '_' -- literals removed
RESTORE DATABASE _ FROM 'bar' IN 'latest' WITH incremental_location = 'baz' -- identifiers removed

parse
RESTORE DATABASE foo, baz FROM 'bar' AS OF SYSTEM TIME '1'
8 changes: 4 additions & 4 deletions pkg/sql/sem/tree/backup.go
@@ -267,7 +267,7 @@ func (o *BackupOptions) Format(ctx *FmtCtx) {

if o.IncrementalStorage != nil {
maybeAddSep()
ctx.WriteString("incremental_storage = ")
ctx.WriteString("incremental_location = ")
ctx.FormatNode(&o.IncrementalStorage)
}
}
@@ -306,7 +306,7 @@ func (o *BackupOptions) CombineWith(other *BackupOptions) error {
if o.IncrementalStorage == nil {
o.IncrementalStorage = other.IncrementalStorage
} else if other.IncrementalStorage != nil {
return errors.New("incremental_storage option specified multiple times")
return errors.New("incremental_location option specified multiple times")
}

return nil
@@ -391,7 +391,7 @@ func (o *RestoreOptions) Format(ctx *FmtCtx) {
}
if o.IncrementalStorage != nil {
maybeAddSep()
ctx.WriteString("incremental_storage = ")
ctx.WriteString("incremental_location = ")
ctx.FormatNode(&o.IncrementalStorage)
}
}
@@ -480,7 +480,7 @@ func (o *RestoreOptions) CombineWith(other *RestoreOptions) error {
if o.IncrementalStorage == nil {
o.IncrementalStorage = other.IncrementalStorage
} else if other.IncrementalStorage != nil {
return errors.New("incremental_storage option specified multiple times")
return errors.New("incremental_location option specified multiple times")
}

return nil
5 changes: 4 additions & 1 deletion pkg/util/fsm/gen/REPORTS.bzl
@@ -52,7 +52,10 @@ def gen_reports(name, dep, transitions_variable, starting_state_name):
lower = transitions_variable.lower()
native.genrule(
name = name,
cmd = "$(location :{template_name}_bin) $(location {lower}_diagram.gv) $(location {lower}_report.txt) 'bazel build {name}'".format(
cmd = """\
export COCKROACH_INTERNAL_DISABLE_METAMORPHIC_TESTING=true
$(location :{template_name}_bin) $(location {lower}_diagram.gv) $(location {lower}_report.txt) 'bazel build {name}'
""".format(
template_name = template_name,
lower = lower,
name = name,