chore(deps): bump github.com/moby/buildkit to 0.17.3

This commit is contained in:
CrazyMax
2024-12-19 17:38:32 +01:00
parent 61dac9fbfc
commit f0b4c3f2a4
44 changed files with 2284 additions and 376 deletions

10
go.mod
View File

@@ -27,7 +27,7 @@ require (
github.com/matcornic/hermes/v2 v2.1.0 github.com/matcornic/hermes/v2 v2.1.0
github.com/matrix-org/gomatrix v0.0.0-20210324163249-be2af5ef2e16 github.com/matrix-org/gomatrix v0.0.0-20210324163249-be2af5ef2e16
github.com/microcosm-cc/bluemonday v1.0.27 github.com/microcosm-cc/bluemonday v1.0.27
github.com/moby/buildkit v0.13.2 github.com/moby/buildkit v0.17.3
github.com/nlopes/slack v0.6.0 github.com/nlopes/slack v0.6.0
github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/go-digest v1.0.0
github.com/opencontainers/image-spec v1.1.0 github.com/opencontainers/image-spec v1.1.0
@@ -86,7 +86,7 @@ require (
github.com/golang/protobuf v1.5.4 // indirect github.com/golang/protobuf v1.5.4 // indirect
github.com/google/gnostic-models v0.6.8 // indirect github.com/google/gnostic-models v0.6.8 // indirect
github.com/google/gofuzz v1.2.0 // indirect github.com/google/gofuzz v1.2.0 // indirect
github.com/google/pprof v0.0.0-20230323073829-e72429f035bd // indirect github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 // indirect
github.com/google/uuid v1.6.0 // indirect github.com/google/uuid v1.6.0 // indirect
github.com/gorilla/css v1.0.1 // indirect github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/mux v1.8.1 // indirect github.com/gorilla/mux v1.8.1 // indirect
@@ -119,10 +119,12 @@ require (
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/olekukonko/tablewriter v0.0.1 // indirect github.com/olekukonko/tablewriter v0.0.1 // indirect
github.com/opencontainers/runtime-spec v1.2.0 // indirect github.com/opencontainers/runtime-spec v1.2.0 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/rivo/uniseg v0.4.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect
github.com/spf13/pflag v1.0.5 // indirect github.com/spf13/pflag v1.0.5 // indirect
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4 // indirect
github.com/vanng822/css v0.0.0-20190504095207-a21e860bcd04 // indirect github.com/vanng822/css v0.0.0-20190504095207-a21e860bcd04 // indirect
github.com/vanng822/go-premailer v0.0.0-20191214114701-be27abe028fe // indirect github.com/vanng822/go-premailer v0.0.0-20191214114701-be27abe028fe // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 // indirect
@@ -131,12 +133,12 @@ require (
go.opentelemetry.io/otel/trace v1.28.0 // indirect go.opentelemetry.io/otel/trace v1.28.0 // indirect
golang.org/x/crypto v0.31.0 // indirect golang.org/x/crypto v0.31.0 // indirect
golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect
golang.org/x/net v0.28.0 // indirect golang.org/x/net v0.29.0 // indirect
golang.org/x/oauth2 v0.23.0 // indirect golang.org/x/oauth2 v0.23.0 // indirect
golang.org/x/sync v0.10.0 // indirect golang.org/x/sync v0.10.0 // indirect
golang.org/x/term v0.27.0 // indirect golang.org/x/term v0.27.0 // indirect
golang.org/x/text v0.21.0 // indirect golang.org/x/text v0.21.0 // indirect
golang.org/x/time v0.3.0 // indirect golang.org/x/time v0.6.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df // indirect gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df // indirect

22
go.sum
View File

@@ -138,8 +138,8 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg=
github.com/google/pprof v0.0.0-20230323073829-e72429f035bd h1:r8yyd+DJDmsUhGrRBxH5Pj7KeFK5l+Y3FsgT8keqKtk= github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg=
github.com/google/pprof v0.0.0-20230323073829-e72429f035bd/go.mod h1:79YE0hCXdHag9sBkw2o+N/YnZtTkXi0UT9Nnixa5eYk= github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -230,8 +230,8 @@ github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJ
github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moby/buildkit v0.13.2 h1:nXNszM4qD9E7QtG7bFWPnDI1teUQFQglBzon/IU3SzI= github.com/moby/buildkit v0.17.3 h1:XN8ddC5gO1kGJJfi86kzvDlPOyLyPk66hTvswqhj6NQ=
github.com/moby/buildkit v0.13.2/go.mod h1:2cyVOv9NoHM7arphK9ZfHIWKn9YVZRFd1wXB8kKmEzY= github.com/moby/buildkit v0.17.3/go.mod h1:vr5vltV8wt4F2jThbNOChfbAklJ0DOW11w36v210hOg=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/sys/capability v0.3.0 h1:kEP+y6te0gEXIaeQhIi0s7vKs/w0RPoH1qPa6jROcVg= github.com/moby/sys/capability v0.3.0 h1:kEP+y6te0gEXIaeQhIi0s7vKs/w0RPoH1qPa6jROcVg=
@@ -272,6 +272,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA= github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA=
github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo= github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -319,6 +321,8 @@ github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOf
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4 h1:7I5c2Ig/5FgqkYOh/N87NzoyI9U15qUPXhDD8uCupv8=
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4/go.mod h1:278M4p8WsNh3n4a1eqiFcV2FGk7wE5fwUpUom9mK9lE=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/vanng822/css v0.0.0-20190504095207-a21e860bcd04 h1:L0rPdfzq43+NV8rfIx2kA4iSSLRj2jN5ijYHoeXRwvQ= github.com/vanng822/css v0.0.0-20190504095207-a21e860bcd04 h1:L0rPdfzq43+NV8rfIx2kA4iSSLRj2jN5ijYHoeXRwvQ=
@@ -377,8 +381,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs= golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -424,8 +428,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
@@ -438,7 +442,7 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b h1:+YaDE2r2OG8t/z5qmsh7Y+XXwCbvadxxZ0YY6mTdrVA= google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 h1:KAeGQVN3M9nD0/bQXnr/ClcEMJ968gUXJQ9pwfSynuQ=
google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 h1:wKguEg1hsxI2/L3hUYrpo1RVi48K+uTyzKqprwLXsb8= google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 h1:wKguEg1hsxI2/L3hUYrpo1RVi48K+uTyzKqprwLXsb8=
google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo= google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=

View File

@@ -14,7 +14,7 @@ import (
type Client struct { type Client struct {
ast *parser.Node ast *parser.Node
stages []instructions.Stage stages []instructions.Stage
metaArgs []instructions.KeyValuePairOptional metaArgs shell.EnvGetter
shlex *shell.Lex shlex *shell.Lex
} }
@@ -35,26 +35,28 @@ func New(opts Options) (*Client, error) {
return nil, errors.Wrapf(err, "cannot parse Dockerfile %s", opts.Filename) return nil, errors.Wrapf(err, "cannot parse Dockerfile %s", opts.Filename)
} }
stages, metaArgs, err := instructions.Parse(parsed.AST) stages, metaArgs, err := instructions.Parse(parsed.AST, nil)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "cannot parse stages for Dockerfile %s", opts.Filename) return nil, errors.Wrapf(err, "cannot parse stages for Dockerfile %s", opts.Filename)
} }
var kvpoArgs []instructions.KeyValuePairOptional var kvpoArgs []string
shlex := shell.NewLex(parsed.EscapeToken) shlex := shell.NewLex(parsed.EscapeToken)
for _, cmd := range metaArgs { for _, cmd := range metaArgs {
for _, metaArg := range cmd.Args { for _, metaArg := range cmd.Args {
if metaArg.Value != nil { if metaArg.Value != nil {
*metaArg.Value, _ = shlex.ProcessWordWithMap(*metaArg.Value, metaArgsToMap(kvpoArgs)) if name, _, err := shlex.ProcessWord(*metaArg.Value, shell.EnvsFromSlice(kvpoArgs)); err == nil {
metaArg.Value = &name
} }
kvpoArgs = append(kvpoArgs, metaArg) }
kvpoArgs = append(kvpoArgs, metaArg.String())
} }
} }
return &Client{ return &Client{
ast: parsed.AST, ast: parsed.AST,
stages: stages, stages: stages,
metaArgs: kvpoArgs, metaArgs: shell.EnvsFromSlice(kvpoArgs),
shlex: shlex, shlex: shlex,
}, nil }, nil
} }
@@ -67,11 +69,3 @@ func (c *Client) isStageName(name string) bool {
} }
return false return false
} }
func metaArgsToMap(metaArgs []instructions.KeyValuePairOptional) map[string]string {
m := map[string]string{}
for _, arg := range metaArgs {
m[arg.Key] = arg.ValueString()
}
return m
}

View File

@@ -38,7 +38,7 @@ func (c *Client) FromImages() (Images, error) {
return nil, errors.Wrapf(err, "cannot parse instruction") return nil, errors.Wrapf(err, "cannot parse instruction")
} }
if baseName := ins.(*instructions.Stage).BaseName; baseName != "scratch" { if baseName := ins.(*instructions.Stage).BaseName; baseName != "scratch" {
name, err := c.shlex.ProcessWordWithMap(baseName, metaArgsToMap(c.metaArgs)) name, _, err := c.shlex.ProcessWord(baseName, c.metaArgs)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -59,7 +59,7 @@ func (c *Client) FromImages() (Images, error) {
return nil, errors.Wrapf(err, "cannot parse command") return nil, errors.Wrapf(err, "cannot parse command")
} }
if copyFrom := cmd.(*instructions.CopyCommand).From; copyFrom != "null" { if copyFrom := cmd.(*instructions.CopyCommand).From; copyFrom != "null" {
name, err := c.shlex.ProcessWordWithMap(copyFrom, metaArgsToMap(c.metaArgs)) name, _, err := c.shlex.ProcessWord(copyFrom, c.metaArgs)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -85,7 +85,7 @@ func (c *Client) FromImages() (Images, error) {
if mount.Type != instructions.MountTypeBind || len(mount.From) == 0 { if mount.Type != instructions.MountTypeBind || len(mount.From) == 0 {
continue continue
} }
name, err := c.shlex.ProcessWordWithMap(mount.From, metaArgsToMap(c.metaArgs)) name, _, err := c.shlex.ProcessWord(mount.From, c.metaArgs)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@@ -258,10 +258,10 @@ func (p *Profile) postDecode() error {
// If this a main linux kernel mapping with a relocation symbol suffix // If this a main linux kernel mapping with a relocation symbol suffix
// ("[kernel.kallsyms]_text"), extract said suffix. // ("[kernel.kallsyms]_text"), extract said suffix.
// It is fairly hacky to handle at this level, but the alternatives appear even worse. // It is fairly hacky to handle at this level, but the alternatives appear even worse.
if strings.HasPrefix(m.File, "[kernel.kallsyms]") { const prefix = "[kernel.kallsyms]"
m.KernelRelocationSymbol = strings.ReplaceAll(m.File, "[kernel.kallsyms]", "") if strings.HasPrefix(m.File, prefix) {
m.KernelRelocationSymbol = m.File[len(prefix):]
} }
} }
functions := make(map[uint64]*Function, len(p.Function)) functions := make(map[uint64]*Function, len(p.Function))
@@ -530,6 +530,7 @@ func (p *Line) decoder() []decoder {
func (p *Line) encode(b *buffer) { func (p *Line) encode(b *buffer) {
encodeUint64Opt(b, 1, p.functionIDX) encodeUint64Opt(b, 1, p.functionIDX)
encodeInt64Opt(b, 2, p.Line) encodeInt64Opt(b, 2, p.Line)
encodeInt64Opt(b, 3, p.Column)
} }
var lineDecoder = []decoder{ var lineDecoder = []decoder{
@@ -538,6 +539,8 @@ var lineDecoder = []decoder{
func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) }, func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) },
// optional int64 line = 2 // optional int64 line = 2
func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Line) }, func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Line) },
// optional int64 column = 3
func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Column) },
} }
func (p *Function) decoder() []decoder { func (p *Function) decoder() []decoder {

View File

@@ -56,7 +56,7 @@ func javaCPUProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte
} }
// Strip out addresses for better merge. // Strip out addresses for better merge.
if err = p.Aggregate(true, true, true, true, false); err != nil { if err = p.Aggregate(true, true, true, true, false, false); err != nil {
return nil, err return nil, err
} }
@@ -99,7 +99,7 @@ func parseJavaProfile(b []byte) (*Profile, error) {
} }
// Strip out addresses for better merge. // Strip out addresses for better merge.
if err = p.Aggregate(true, true, true, true, false); err != nil { if err = p.Aggregate(true, true, true, true, false, false); err != nil {
return nil, err return nil, err
} }

View File

@@ -326,12 +326,13 @@ func (l *Location) key() locationKey {
key.addr -= l.Mapping.Start key.addr -= l.Mapping.Start
key.mappingID = l.Mapping.ID key.mappingID = l.Mapping.ID
} }
lines := make([]string, len(l.Line)*2) lines := make([]string, len(l.Line)*3)
for i, line := range l.Line { for i, line := range l.Line {
if line.Function != nil { if line.Function != nil {
lines[i*2] = strconv.FormatUint(line.Function.ID, 16) lines[i*2] = strconv.FormatUint(line.Function.ID, 16)
} }
lines[i*2+1] = strconv.FormatInt(line.Line, 16) lines[i*2+1] = strconv.FormatInt(line.Line, 16)
lines[i*2+2] = strconv.FormatInt(line.Column, 16)
} }
key.lines = strings.Join(lines, "|") key.lines = strings.Join(lines, "|")
return key return key
@@ -418,6 +419,7 @@ func (pm *profileMerger) mapLine(src Line) Line {
ln := Line{ ln := Line{
Function: pm.mapFunction(src.Function), Function: pm.mapFunction(src.Function),
Line: src.Line, Line: src.Line,
Column: src.Column,
} }
return ln return ln
} }

View File

@@ -145,6 +145,7 @@ type Location struct {
type Line struct { type Line struct {
Function *Function Function *Function
Line int64 Line int64
Column int64
functionIDX uint64 functionIDX uint64
} }
@@ -436,7 +437,7 @@ func (p *Profile) CheckValid() error {
// Aggregate merges the locations in the profile into equivalence // Aggregate merges the locations in the profile into equivalence
// classes preserving the request attributes. It also updates the // classes preserving the request attributes. It also updates the
// samples to point to the merged locations. // samples to point to the merged locations.
func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address bool) error { func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, columnnumber, address bool) error {
for _, m := range p.Mapping { for _, m := range p.Mapping {
m.HasInlineFrames = m.HasInlineFrames && inlineFrame m.HasInlineFrames = m.HasInlineFrames && inlineFrame
m.HasFunctions = m.HasFunctions && function m.HasFunctions = m.HasFunctions && function
@@ -458,7 +459,7 @@ func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address
} }
// Aggregate locations // Aggregate locations
if !inlineFrame || !address || !linenumber { if !inlineFrame || !address || !linenumber || !columnnumber {
for _, l := range p.Location { for _, l := range p.Location {
if !inlineFrame && len(l.Line) > 1 { if !inlineFrame && len(l.Line) > 1 {
l.Line = l.Line[len(l.Line)-1:] l.Line = l.Line[len(l.Line)-1:]
@@ -466,6 +467,12 @@ func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address
if !linenumber { if !linenumber {
for i := range l.Line { for i := range l.Line {
l.Line[i].Line = 0 l.Line[i].Line = 0
l.Line[i].Column = 0
}
}
if !columnnumber {
for i := range l.Line {
l.Line[i].Column = 0
} }
} }
if !address { if !address {
@@ -627,10 +634,11 @@ func (l *Location) string() string {
for li := range l.Line { for li := range l.Line {
lnStr := "??" lnStr := "??"
if fn := l.Line[li].Function; fn != nil { if fn := l.Line[li].Function; fn != nil {
lnStr = fmt.Sprintf("%s %s:%d s=%d", lnStr = fmt.Sprintf("%s %s:%d:%d s=%d",
fn.Name, fn.Name,
fn.Filename, fn.Filename,
l.Line[li].Line, l.Line[li].Line,
l.Line[li].Column,
fn.StartLine) fn.StartLine)
if fn.Name != fn.SystemName { if fn.Name != fn.SystemName {
lnStr = lnStr + "(" + fn.SystemName + ")" lnStr = lnStr + "(" + fn.SystemName + ")"

View File

@@ -6,11 +6,16 @@ Aaron L. Xu <likexu@harmonycloud.cn>
Aaron Lehmann <aaron.lehmann@docker.com> Aaron Lehmann <aaron.lehmann@docker.com>
Aaron Lehmann <alehmann@netflix.com> Aaron Lehmann <alehmann@netflix.com>
Abdur Rehman <abdur_rehman@mentor.com> Abdur Rehman <abdur_rehman@mentor.com>
adamperlin <adamp@nanosoft.com>
Addam Hardy <addam.hardy@gmail.com> Addam Hardy <addam.hardy@gmail.com>
Adrian Plata <adrian.plata@docker.com> Adrian Plata <adrian.plata@docker.com>
Adrien Delorme <azr@users.noreply.github.com>
Ahmon Dancy <adancy@wikimedia.org>
Aidan Hobson Sayers <aidanhs@cantab.net> Aidan Hobson Sayers <aidanhs@cantab.net>
Akihiro Suda <akihiro.suda.cz@hco.ntt.co.jp> Akihiro Suda <akihiro.suda.cz@hco.ntt.co.jp>
Alan Fregtman <941331+darkvertex@users.noreply.github.com> Alan Fregtman <941331+darkvertex@users.noreply.github.com>
Alano Terblanche <18033717+Benehiko@users.noreply.github.com>
Aleksa Sarai <cyphar@cyphar.com>
Alex Couture-Beil <alex@earthly.dev> Alex Couture-Beil <alex@earthly.dev>
Alex Mayer <amayer5125@gmail.com> Alex Mayer <amayer5125@gmail.com>
Alex Suraci <suraci.alex@gmail.com> Alex Suraci <suraci.alex@gmail.com>
@@ -29,17 +34,27 @@ Andrey Smirnov <smirnov.andrey@gmail.com>
Andy Alt <andy5995@users.noreply.github.com> Andy Alt <andy5995@users.noreply.github.com>
Andy Caldwell <andrew.caldwell@metaswitch.com> Andy Caldwell <andrew.caldwell@metaswitch.com>
Ankush Agarwal <ankushagarwal11@gmail.com> Ankush Agarwal <ankushagarwal11@gmail.com>
Anthony Nandaa <profnandaa@gmail.com>
Anthony Sottile <asottile@umich.edu> Anthony Sottile <asottile@umich.edu>
Anurag Goel <anurag@render.com> Anurag Goel <anurag@render.com>
Anusha Ragunathan <anusha@docker.com> Anusha Ragunathan <anusha@docker.com>
Arkadiusz Drabczyk <arkadiusz@drabczyk.org>
Arnaldo Garcia Rincon <agarrcia@amazon.com>
Arnaud Bailly <arnaud.oqube@gmail.com> Arnaud Bailly <arnaud.oqube@gmail.com>
Artem Khramov <akhramov@pm.me>
Austin Vazquez <macedonv@amazon.com>
Avi Deitcher <avi@deitcher.net> Avi Deitcher <avi@deitcher.net>
Bastiaan Bakker <bbakker@xebia.com> Bastiaan Bakker <bbakker@xebia.com>
Ben Longo <benlongo9807@gmail.com> Ben Longo <benlongo9807@gmail.com>
Bertrand Paquet <bertrand.paquet@gmail.com> Bertrand Paquet <bertrand.paquet@gmail.com>
Billy Owire <billyowire@microsoft.com>
Bin Liu <liubin0329@gmail.com> Bin Liu <liubin0329@gmail.com>
Bjorn Neergaard <bjorn.neergaard@docker.com>
Brandon Mitchell <git@bmitch.net> Brandon Mitchell <git@bmitch.net>
Brennan Kinney <5098581+polarathene@users.noreply.github.com>
Brian Goff <cpuguy83@gmail.com> Brian Goff <cpuguy83@gmail.com>
Bunyamin Dokmetas <19335284+ztzxt@users.noreply.github.com>
Burt Holzman <burt@fnal.gov>
Ce Gao <ce.gao@outlook.com> Ce Gao <ce.gao@outlook.com>
Chaerim Yeo <yeochaerim@gmail.com> Chaerim Yeo <yeochaerim@gmail.com>
Changwei Ge <gechangwei@bytedance.com> Changwei Ge <gechangwei@bytedance.com>
@@ -60,8 +75,10 @@ Corey Larson <corey@earthly.dev>
Cory Bennett <cbennett@netflix.com> Cory Bennett <cbennett@netflix.com>
Cory Snider <csnider@mirantis.com> Cory Snider <csnider@mirantis.com>
coryb <cbennett@netflix.com> coryb <cbennett@netflix.com>
Craig Andrews <candrews@integralblue.com>
CrazyMax <github@crazymax.dev> CrazyMax <github@crazymax.dev>
Csaba Apagyi <csaba.apagyi@gmail.com> Csaba Apagyi <csaba.apagyi@gmail.com>
cuiyourong <cuiyourong@gmail.com>
Dan Duvall <dduvall@wikimedia.org> Dan Duvall <dduvall@wikimedia.org>
Daniel Cassidy <mail@danielcassidy.me.uk> Daniel Cassidy <mail@danielcassidy.me.uk>
Daniel Nephin <dnephin@gmail.com> Daniel Nephin <dnephin@gmail.com>
@@ -74,9 +91,11 @@ David Dooling <dooling@gmail.com>
David Gageot <david.gageot@docker.com> David Gageot <david.gageot@docker.com>
David Karlsson <david.karlsson@docker.com> David Karlsson <david.karlsson@docker.com>
Davis Schirmer <djds@bghost.xyz> Davis Schirmer <djds@bghost.xyz>
Debosmit Ray <dray92@uw.edu>
Dennis Chen <dennis.chen@arm.com> Dennis Chen <dennis.chen@arm.com>
Dennis Haney <davh@davh.dk>
dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Derek McGowan <derek@mcgstyle.net> Derek McGowan <derek@mcg.dev>
Dharmit Shah <shahdharmit@gmail.com> Dharmit Shah <shahdharmit@gmail.com>
Ding Fei <dingfei@stars.org.cn> Ding Fei <dingfei@stars.org.cn>
dito <itodaisuke00@gmail.com> dito <itodaisuke00@gmail.com>
@@ -86,16 +105,21 @@ Eli Uriegas <eli.uriegas@docker.com>
Elias Faxö <elias.faxo@tre.se> Elias Faxö <elias.faxo@tre.se>
Eng Zer Jun <engzerjun@gmail.com> Eng Zer Jun <engzerjun@gmail.com>
Eric Engestrom <eric@engestrom.ch> Eric Engestrom <eric@engestrom.ch>
Erik McKelvey <Erik.McKelvey.is@gmail.com>
Erik Sipsma <erik@sipsma.dev> Erik Sipsma <erik@sipsma.dev>
eyherabh <hugogabriel.eyherabide@gmail.com> eyherabh <hugogabriel.eyherabide@gmail.com>
f0 <f0@users.noreply.github.com> f0 <f0@users.noreply.github.com>
fanjiyun.fjy <fanjiyun.fjy@alibaba-inc.com>
Felix Fontein <felix@fontein.de>
Fernando Miguel <github@FernandoMiguel.net> Fernando Miguel <github@FernandoMiguel.net>
Fiona Klute <fiona.klute@gmx.de> Fiona Klute <fiona.klute@gmx.de>
Foysal Iqbal <foysal.iqbal.fb@gmail.com> Foysal Iqbal <foysal.iqbal.fb@gmail.com>
Frank Villaro-Dixon <frank.villarodixon@merkle.com>
frankyang <yyb196@gmail.com>
Fred Cox <mcfedr@gmail.com> Fred Cox <mcfedr@gmail.com>
Frieder Bluemle <frieder.bluemle@gmail.com> Frieder Bluemle <frieder.bluemle@gmail.com>
Gabriel <samfiragabriel@gmail.com> Félix Mattrat <felix@dysosmus.net>
Gabriel Adrian Samfira <gsamfira@cloudbasesolutions.com> Gabriel-Adrian Samfira <samfiragabriel@gmail.com>
Gaetan de Villele <gdevillele@gmail.com> Gaetan de Villele <gdevillele@gmail.com>
Gahl Saraf <saraf.gahl@gmail.com> Gahl Saraf <saraf.gahl@gmail.com>
genglu.gl <luzigeng32@163.com> genglu.gl <luzigeng32@163.com>
@@ -103,7 +127,10 @@ George <george@betterde.com>
ggjulio <juligonz@student.42.fr> ggjulio <juligonz@student.42.fr>
Govind Rai <raigovind93@gmail.com> Govind Rai <raigovind93@gmail.com>
Grant Reaber <grant.reaber@gmail.com> Grant Reaber <grant.reaber@gmail.com>
Guilhem C <guilhem.charles@gmail.com> Grégoire Payen de La Garanderie <gregoire.payen.de.la.garanderie@intel.com>
guangwu <guoguangwu@magic-shield.com>
Guilhem Charles <guilhem.charles@gmail.com>
guoguangwu <guoguangwug@gmail.com>
Hans van den Bogert <hansbogert@gmail.com> Hans van den Bogert <hansbogert@gmail.com>
Hao Hu <hao.hu.fr@gmail.com> Hao Hu <hao.hu.fr@gmail.com>
Hector S <hfsam88@gmail.com> Hector S <hfsam88@gmail.com>
@@ -112,13 +139,19 @@ Himanshu Pandey <hpandey@pivotal.io>
Hiromu Nakamura <abctail30@gmail.com> Hiromu Nakamura <abctail30@gmail.com>
HowJMay <vulxj0j8j8@gmail.com> HowJMay <vulxj0j8j8@gmail.com>
Hugo Santos <hugo@namespacelabs.com> Hugo Santos <hugo@namespacelabs.com>
Höhl, Lukas <lukas.hoehl@accso.de>
Ian Campbell <ijc@docker.com> Ian Campbell <ijc@docker.com>
Ian King'ori <kingorim.ian@gmail.com>
Ignas Mikalajūnas <ignas@devzero.io>
Ilya Dmitrichenko <errordeveloper@gmail.com> Ilya Dmitrichenko <errordeveloper@gmail.com>
Iskander (Alex) Sharipov <quasilyte@gmail.com> Iskander (Alex) Sharipov <quasilyte@gmail.com>
Jacob Gillespie <jacobwgillespie@gmail.com> Jacob Gillespie <jacobwgillespie@gmail.com>
Jacob MacElroy <jacob@okteto.com> Jacob MacElroy <jacob@okteto.com>
Jakub Ciolek <jakub@ciolek.dev>
James Carnegie <james.carnegie@docker.com>
Jean-Pierre Huynh <jean-pierre.huynh@ounet.fr> Jean-Pierre Huynh <jean-pierre.huynh@ounet.fr>
Jeffrey Huang <jeffreyhuang23@gmail.com> Jeffrey Huang <jeffreyhuang23@gmail.com>
Jesper Noordsij <jesper@sslleiden.nl>
Jesse Rittner <rittneje@gmail.com> Jesse Rittner <rittneje@gmail.com>
Jessica Frazelle <acidburn@microsoft.com> Jessica Frazelle <acidburn@microsoft.com>
jgeiger <jgeiger@gmail.com> jgeiger <jgeiger@gmail.com>
@@ -130,6 +163,7 @@ John Maguire <jmaguire@duosecurity.com>
John Mulhausen <john@docker.com> John Mulhausen <john@docker.com>
John Tims <john.k.tims@gmail.com> John Tims <john.k.tims@gmail.com>
Jon Zeolla <zeolla@gmail.com> Jon Zeolla <zeolla@gmail.com>
Jonathan A. Sternberg <jonathan.sternberg@docker.com>
Jonathan Azoff <azoff@users.noreply.github.com> Jonathan Azoff <azoff@users.noreply.github.com>
Jonathan Giannuzzi <jonathan@giannuzzi.me> Jonathan Giannuzzi <jonathan@giannuzzi.me>
Jonathan Stoppani <jonathan.stoppani@divio.com> Jonathan Stoppani <jonathan.stoppani@divio.com>
@@ -142,11 +176,14 @@ Justin Chadwell <me@jedevc.com>
Justin Cormack <justin.cormack@docker.com> Justin Cormack <justin.cormack@docker.com>
Justin Garrison <justin@linux.com> Justin Garrison <justin@linux.com>
Jörg Franke <359489+NewJorg@users.noreply.github.com> Jörg Franke <359489+NewJorg@users.noreply.github.com>
Kai Takac <kai.takac@gmail.com>
Kang, Matthew <impulsecss@gmail.com> Kang, Matthew <impulsecss@gmail.com>
Kazuyoshi Kato <kaz@fly.io>
Kees Cook <keescook@chromium.org> Kees Cook <keescook@chromium.org>
Kevin Burke <kev@inburke.com> Kevin Burke <kev@inburke.com>
kevinmeredith <kevin.m.meredith@gmail.com> kevinmeredith <kevin.m.meredith@gmail.com>
Kir Kolyshkin <kolyshkin@gmail.com> Kir Kolyshkin <kolyshkin@gmail.com>
Kirill A. Korinsky <kirill@korins.ky>
Kohei Tokunaga <ktokunaga.mail@gmail.com> Kohei Tokunaga <ktokunaga.mail@gmail.com>
Koichi Shiraishi <zchee.io@gmail.com> Koichi Shiraishi <zchee.io@gmail.com>
Kris-Mikael Krister <krismikael@protonmail.com> Kris-Mikael Krister <krismikael@protonmail.com>
@@ -155,7 +192,9 @@ Kyle <Kylemit@gmail.com>
l00397676 <lujingxiao@huawei.com> l00397676 <lujingxiao@huawei.com>
Lajos Papp <lalyos@yahoo.com> Lajos Papp <lalyos@yahoo.com>
lalyos <lalyos@yahoo.com> lalyos <lalyos@yahoo.com>
Leandro Santiago <leandrosansilva@gmail.com>
Levi Harrison <levisamuelharrison@gmail.com> Levi Harrison <levisamuelharrison@gmail.com>
liulanzheng <lanzheng.liulz@alibaba-inc.com>
liwenqi <vikilwq@zju.edu.cn> liwenqi <vikilwq@zju.edu.cn>
lixiaobing10051267 <li.xiaobing1@zte.com.cn> lixiaobing10051267 <li.xiaobing1@zte.com.cn>
lomot <lomot@qq.com> lomot <lomot@qq.com>
@@ -164,8 +203,10 @@ Luca Visentin <luck.visentin@gmail.com>
Maciej Kalisz <mdkalish@users.noreply.github.com> Maciej Kalisz <mdkalish@users.noreply.github.com>
Madhav Puri <madhav.puri@gmail.com> Madhav Puri <madhav.puri@gmail.com>
Manu Gupta <manugupt1@gmail.com> Manu Gupta <manugupt1@gmail.com>
Marat Radchenko <marat@slonopotamus.org>
Marcus Comstedt <marcus@mc.pp.se> Marcus Comstedt <marcus@mc.pp.se>
Mark Gordon <msg555@gmail.com> Mark Gordon <msg555@gmail.com>
Mark Yen <mark.yen@suse.com>
Marko Kohtala <marko.kohtala@gmail.com> Marko Kohtala <marko.kohtala@gmail.com>
Mary Anthony <mary@docker.com> Mary Anthony <mary@docker.com>
masibw <masi19bw@gmail.com> masibw <masi19bw@gmail.com>
@@ -181,19 +222,26 @@ Mihai Borobocea <MihaiBorob@gmail.com>
Mike Brown <brownwm@us.ibm.com> Mike Brown <brownwm@us.ibm.com>
mikelinjie <294893458@qq.com> mikelinjie <294893458@qq.com>
Mikhail Vasin <vasin@cloud-tv.ru> Mikhail Vasin <vasin@cloud-tv.ru>
Milas Bowman <milas.bowman@docker.com>
Misty Stanley-Jones <misty@docker.com> Misty Stanley-Jones <misty@docker.com>
Mitsuru Kariya <mitsuru.kariya@nttdata.com>
Miyachi Katsuya <miyachi_katsuya@r.recruit.co.jp> Miyachi Katsuya <miyachi_katsuya@r.recruit.co.jp>
Morgan Bauer <mbauer@us.ibm.com> Morgan Bauer <mbauer@us.ibm.com>
Moritz "WanzenBug" Wanzenböck <moritz@wanzenbug.xyz>
Morlay <morlay.null@gmail.com> Morlay <morlay.null@gmail.com>
msg <msg@clinc.com> msg <msg@clinc.com>
Nao YONASHIRO <yonashiro@r.recruit.co.jp> Nao YONASHIRO <yonashiro@r.recruit.co.jp>
Natasha Jarus <linuxmercedes@gmail.com> Natasha Jarus <linuxmercedes@gmail.com>
Nathan Sullivan <nathan@nightsys.net> Nathan Sullivan <nathan@nightsys.net>
Nguyễn Đức Chiến <nobi@nobidev.com>
Nick Miyake <nmiyake@users.noreply.github.com> Nick Miyake <nmiyake@users.noreply.github.com>
Nick Santos <nick.santos@docker.com> Nick Santos <nick.santos@docker.com>
Nikhil Pandeti <nikhil.pandeti@utexas.edu> Nikhil Pandeti <nikhil.pandeti@utexas.edu>
njucjc <njucjc@gmail.com>
Nobi <nobi@nobidev.com>
Noel Georgi <18496730+frezbo@users.noreply.github.com> Noel Georgi <18496730+frezbo@users.noreply.github.com>
Oliver Bristow <oliver.bristow@project-tracr.com> Oliver Bristow <oliver.bristow@project-tracr.com>
omahs <73983677+omahs@users.noreply.github.com>
Omer Duchovne <79370724+od-cyera@users.noreply.github.com> Omer Duchovne <79370724+od-cyera@users.noreply.github.com>
Omer Mizrahi <ommizrah@microsoft.com> Omer Mizrahi <ommizrah@microsoft.com>
Ondrej Fabry <ofabry@cisco.com> Ondrej Fabry <ofabry@cisco.com>
@@ -206,6 +254,7 @@ Paul "TBBle" Hampson <Paul.Hampson@Pobox.com>
Paweł Gronowski <pawel.gronowski@docker.com> Paweł Gronowski <pawel.gronowski@docker.com>
Peter Dave Hello <hsu@peterdavehello.org> Peter Dave Hello <hsu@peterdavehello.org>
Petr Fedchenkov <giggsoff@gmail.com> Petr Fedchenkov <giggsoff@gmail.com>
Petteri Räty <github@petteriraty.eu>
Phil Estes <estesp@gmail.com> Phil Estes <estesp@gmail.com>
Pierre Fenoll <pierrefenoll@gmail.com> Pierre Fenoll <pierrefenoll@gmail.com>
pieterdd <pieterdd@users.noreply.github.com> pieterdd <pieterdd@users.noreply.github.com>
@@ -213,18 +262,24 @@ Pranav Pandit <pranavp@microsoft.com>
Pratik Raj <rajpratik71@gmail.com> Pratik Raj <rajpratik71@gmail.com>
Prayag Verma <prayag.verma@gmail.com> Prayag Verma <prayag.verma@gmail.com>
Qiang Huang <h.huangqiang@huawei.com> Qiang Huang <h.huangqiang@huawei.com>
racequite <quiterace@gmail.com>
Remy Suen <remy.suen@gmail.com> Remy Suen <remy.suen@gmail.com>
Reshen <reshen817@gmail.com>
retornam <retornam@users.noreply.github.com>
Ri Xu <xuri.me@gmail.com> Ri Xu <xuri.me@gmail.com>
Rob Taylor <rob@shape.build> Rob Taylor <rob@shape.build>
Robert Estelle <robertestelle@gmail.com> Robert Estelle <robertestelle@gmail.com>
Rubens Figueiredo <r.figueiredo.52@gmail.com> Rubens Figueiredo <r.figueiredo.52@gmail.com>
Salim B <git@salim.space>
Sam Whited <sam@samwhited.com> Sam Whited <sam@samwhited.com>
Sascha Hemleb <github@sascha.hemleb.de>
Sascha Schwarze <schwarzs@de.ibm.com> Sascha Schwarze <schwarzs@de.ibm.com>
Sean P. Kane <spkane00@gmail.com> Sean P. Kane <spkane00@gmail.com>
Sebastiaan van Stijn <github@gone.nl> Sebastiaan van Stijn <github@gone.nl>
Seiya Miyata <odradek38@gmail.com> Seiya Miyata <odradek38@gmail.com>
Serhat Gülçiçek <serhat25@gmail.com> Serhat Gülçiçek <serhat25@gmail.com>
Sertac Ozercan <sozercan@gmail.com> Sertac Ozercan <sozercan@gmail.com>
Shaun Thompson <shaun.thompson@docker.com>
Shev Yan <yandong_8212@163.com> Shev Yan <yandong_8212@163.com>
Shijiang Wei <mountkin@gmail.com> Shijiang Wei <mountkin@gmail.com>
Shingo Omura <everpeace@gmail.com> Shingo Omura <everpeace@gmail.com>
@@ -239,10 +294,13 @@ Stefan Scherer <stefan.scherer@docker.com>
Stefan Weil <sw@weilnetz.de> Stefan Weil <sw@weilnetz.de>
StefanSchoof <Stefan.Schoof@direkt-gruppe.de> StefanSchoof <Stefan.Schoof@direkt-gruppe.de>
Stepan Blyshchak <stepanblischak@gmail.com> Stepan Blyshchak <stepanblischak@gmail.com>
Stephen Day <stephen.day@docker.com>
Steve Lohr <schdief.law@gmail.com> Steve Lohr <schdief.law@gmail.com>
sunchunming <sunchunming1@jd.com> sunchunming <sunchunming1@jd.com>
Sven Dowideit <SvenDowideit@home.org.au> Sven Dowideit <SvenDowideit@home.org.au>
Swagat Bora <sbora@amazon.com>
Takuya Noguchi <takninnovationresearch@gmail.com> Takuya Noguchi <takninnovationresearch@gmail.com>
Talon Bowler <talon.bowler@docker.com>
Thomas Leonard <thomas.leonard@docker.com> Thomas Leonard <thomas.leonard@docker.com>
Thomas Riccardi <riccardi@systran.fr> Thomas Riccardi <riccardi@systran.fr>
Thomas Shaw <tomwillfixit@users.noreply.github.com> Thomas Shaw <tomwillfixit@users.noreply.github.com>
@@ -256,6 +314,7 @@ Tobias Klauser <tklauser@distanz.ch>
Tomas Tomecek <ttomecek@redhat.com> Tomas Tomecek <ttomecek@redhat.com>
Tomasz Kopczynski <tomek@kopczynski.net.pl> Tomasz Kopczynski <tomek@kopczynski.net.pl>
Tomohiro Kusumoto <zabio1192@gmail.com> Tomohiro Kusumoto <zabio1192@gmail.com>
Tristan Stenner <ts@ppi.de>
Troels Liebe Bentsen <tlb@nversion.dk> Troels Liebe Bentsen <tlb@nversion.dk>
Tõnis Tiigi <tonistiigi@gmail.com> Tõnis Tiigi <tonistiigi@gmail.com>
Valentin Lorentz <progval+git@progval.net> Valentin Lorentz <progval+git@progval.net>
@@ -269,16 +328,21 @@ Wang Yumu <37442693@qq.com>
Wei Fu <fuweid89@gmail.com> Wei Fu <fuweid89@gmail.com>
Wei Zhang <kweizh@gmail.com> Wei Zhang <kweizh@gmail.com>
wingkwong <wingkwong.code@gmail.com> wingkwong <wingkwong.code@gmail.com>
x893675 <x893675@icloud.com>
Xiaofan Zhang <xiaofan.zhang@clinc.com> Xiaofan Zhang <xiaofan.zhang@clinc.com>
Ximo Guanter <ximo.guanter@gmail.com> Ximo Guanter <ximo.guanter@gmail.com>
Yamazaki Masashi <masi19bw@gmail.com> Yamazaki Masashi <masi19bw@gmail.com>
Yan Song <imeoer@linux.alibaba.com> Yan Song <imeoer@linux.alibaba.com>
Yong Tang <yong.tang.github@outlook.com> Yong Tang <yong.tang.github@outlook.com>
Yuichiro Kaneko <spiketeika@gmail.com> Yuichiro Kaneko <spiketeika@gmail.com>
yumemio <59369226+yumemio@users.noreply.github.com>
Yurii Rashkovskii <yrashk@gmail.com> Yurii Rashkovskii <yrashk@gmail.com>
yzewei <yangzewei@loongson.cn>
Zach Badgett <zach.badgett@gmail.com> Zach Badgett <zach.badgett@gmail.com>
zhangwenlong <zhangwenlong8911@163.com> zhangwenlong <zhangwenlong8911@163.com>
Zhizhen He <hezhizhen.yi@gmail.com>
Ziv Tsarfati <digger18@gmail.com> Ziv Tsarfati <digger18@gmail.com>
岁丰 <genglu.gl@antfin.com> 岁丰 <genglu.gl@antfin.com>
沈陵 <shenling.yyb@alibaba-inc.com> 沈陵 <shenling.yyb@alibaba-inc.com>
蝦米 <me@jhdxr.com>
郑泽宇 <perhapszzy@sina.com> 郑泽宇 <perhapszzy@sina.com>

View File

@@ -147,25 +147,32 @@ func (bf *BFlags) Parse() error {
return errors.Wrap(bf.Err, "error setting up flags") return errors.Wrap(bf.Err, "error setting up flags")
} }
for _, arg := range bf.Args { for _, a := range bf.Args {
if !strings.HasPrefix(arg, "--") { if a == "--" {
return errors.Errorf("arg should start with -- : %s", arg) // Stop processing further arguments as flags. We're matching
} // the POSIX Utility Syntax Guidelines here;
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html#tag_12_02
if arg == "--" { //
// > The first -- argument that is not an option-argument should be accepted
// > as a delimiter indicating the end of options. Any following arguments
// > should be treated as operands, even if they begin with the '-' character.
return nil return nil
} }
if !strings.HasPrefix(a, "--") {
return errors.Errorf("arg should start with -- : %s", a)
}
arg, value, hasValue := strings.Cut(arg[2:], "=") flagName, value, hasValue := strings.Cut(a, "=")
arg := flagName[2:]
flag, ok := bf.flags[arg] flag, ok := bf.flags[arg]
if !ok { if !ok {
err := errors.Errorf("unknown flag: %s", arg) err := errors.Errorf("unknown flag: %s", flagName)
return suggest.WrapError(err, arg, allFlags(bf.flags), true) return suggest.WrapError(err, arg, allFlags(bf.flags), true)
} }
if _, ok = bf.used[arg]; ok && flag.flagType != stringsType { if _, ok = bf.used[arg]; ok && flag.flagType != stringsType {
return errors.Errorf("duplicate flag specified: %s", arg) return errors.Errorf("duplicate flag specified: %s", flagName)
} }
bf.used[arg] = flag bf.used[arg] = flag
@@ -174,7 +181,7 @@ func (bf *BFlags) Parse() error {
case boolType: case boolType:
// value == "" is only ok if no "=" was specified // value == "" is only ok if no "=" was specified
if hasValue && value == "" { if hasValue && value == "" {
return errors.Errorf("missing a value on flag: %s", arg) return errors.Errorf("missing a value on flag: %s", flagName)
} }
switch strings.ToLower(value) { switch strings.ToLower(value) {
@@ -183,18 +190,18 @@ func (bf *BFlags) Parse() error {
case "false": case "false":
flag.Value = "false" flag.Value = "false"
default: default:
return errors.Errorf("expecting boolean value for flag %s, not: %s", arg, value) return errors.Errorf("expecting boolean value for flag %s, not: %s", flagName, value)
} }
case stringType: case stringType:
if !hasValue { if !hasValue {
return errors.Errorf("missing a value on flag: %s", arg) return errors.Errorf("missing a value on flag: %s", flagName)
} }
flag.Value = value flag.Value = value
case stringsType: case stringsType:
if !hasValue { if !hasValue {
return errors.Errorf("missing a value on flag: %s", arg) return errors.Errorf("missing a value on flag: %s", flagName)
} }
flag.StringValues = append(flag.StringValues, value) flag.StringValues = append(flag.StringValues, value)

View File

@@ -3,9 +3,8 @@ package instructions
import ( import (
"strings" "strings"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/strslice"
"github.com/moby/buildkit/frontend/dockerfile/parser" "github.com/moby/buildkit/frontend/dockerfile/parser"
dockerspec "github.com/moby/docker-image-spec/specs-go/v1"
"github.com/pkg/errors" "github.com/pkg/errors"
) )
@@ -16,6 +15,7 @@ import (
type KeyValuePair struct { type KeyValuePair struct {
Key string Key string
Value string Value string
NoDelim bool
} }
func (kvp *KeyValuePair) String() string { func (kvp *KeyValuePair) String() string {
@@ -109,8 +109,9 @@ func expandKvp(kvp KeyValuePair, expander SingleWordExpander) (KeyValuePair, err
if err != nil { if err != nil {
return KeyValuePair{}, err return KeyValuePair{}, err
} }
return KeyValuePair{Key: key, Value: value}, nil return KeyValuePair{Key: key, Value: value, NoDelim: kvp.NoDelim}, nil
} }
func expandKvpsInPlace(kvps KeyValuePairs, expander SingleWordExpander) error { func expandKvpsInPlace(kvps KeyValuePairs, expander SingleWordExpander) error {
for i, kvp := range kvps { for i, kvp := range kvps {
newKvp, err := expandKvp(kvp, expander) newKvp, err := expandKvp(kvp, expander)
@@ -155,7 +156,7 @@ type MaintainerCommand struct {
} }
// NewLabelCommand creates a new 'LABEL' command // NewLabelCommand creates a new 'LABEL' command
func NewLabelCommand(k string, v string, NoExp bool) *LabelCommand { func NewLabelCommand(k string, v string, noExp bool) *LabelCommand {
kvp := KeyValuePair{Key: k, Value: v} kvp := KeyValuePair{Key: k, Value: v}
c := "LABEL " c := "LABEL "
c += kvp.String() c += kvp.String()
@@ -165,7 +166,7 @@ func NewLabelCommand(k string, v string, NoExp bool) *LabelCommand {
Labels: KeyValuePairs{ Labels: KeyValuePairs{
kvp, kvp,
}, },
noExpand: NoExp, noExpand: noExp,
} }
return cmd return cmd
} }
@@ -254,6 +255,12 @@ func (c *AddCommand) Expand(expander SingleWordExpander) error {
} }
c.Chown = expandedChown c.Chown = expandedChown
expandedChmod, err := expander(c.Chmod)
if err != nil {
return err
}
c.Chmod = expandedChmod
expandedChecksum, err := expander(c.Checksum) expandedChecksum, err := expander(c.Checksum)
if err != nil { if err != nil {
return err return err
@@ -286,6 +293,12 @@ func (c *CopyCommand) Expand(expander SingleWordExpander) error {
} }
c.Chown = expandedChown c.Chown = expandedChown
expandedChmod, err := expander(c.Chmod)
if err != nil {
return err
}
c.Chmod = expandedChmod
return c.SourcesAndDest.Expand(expander) return c.SourcesAndDest.Expand(expander)
} }
@@ -325,7 +338,7 @@ type ShellInlineFile struct {
// ShellDependantCmdLine represents a cmdline optionally prepended with the shell // ShellDependantCmdLine represents a cmdline optionally prepended with the shell
type ShellDependantCmdLine struct { type ShellDependantCmdLine struct {
CmdLine strslice.StrSlice CmdLine []string
Files []ShellInlineFile Files []ShellInlineFile
PrependShell bool PrependShell bool
} }
@@ -368,7 +381,7 @@ type CmdCommand struct {
// HEALTHCHECK <health-config> // HEALTHCHECK <health-config>
type HealthCheckCommand struct { type HealthCheckCommand struct {
withNameAndCode withNameAndCode
Health *container.HealthConfig Health *dockerspec.HealthcheckConfig
} }
// EntrypointCommand sets the default entrypoint of the container to use the // EntrypointCommand sets the default entrypoint of the container to use the
@@ -479,7 +492,7 @@ func (c *ArgCommand) Expand(expander SingleWordExpander) error {
// SHELL bash -e -c // SHELL bash -e -c
type ShellCommand struct { type ShellCommand struct {
withNameAndCode withNameAndCode
Shell strslice.StrSlice Shell []string
} }
// Stage represents a bundled collection of commands. // Stage represents a bundled collection of commands.
@@ -493,6 +506,7 @@ type ShellCommand struct {
type Stage struct { type Stage struct {
Name string // name of the stage Name string // name of the stage
Commands []Command // commands contained within the stage Commands []Command // commands contained within the stage
OrigCmd string // original FROM command, used for rule checks
BaseName string // name of the base stage or source BaseName string // name of the base stage or source
Platform string // platform of base source to use Platform string // platform of base source to use

View File

@@ -1,14 +1,13 @@
package instructions package instructions
import ( import (
"encoding/csv"
"regexp"
"strconv" "strconv"
"strings" "strings"
"github.com/docker/go-units" "github.com/docker/go-units"
"github.com/moby/buildkit/util/suggest" "github.com/moby/buildkit/util/suggest"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/tonistiigi/go-csvvalue"
) )
type MountType string type MountType string
@@ -84,13 +83,13 @@ func setMountState(cmd *RunCommand, expander SingleWordExpander) error {
if st == nil { if st == nil {
return errors.Errorf("no mount state") return errors.Errorf("no mount state")
} }
var mounts []*Mount mounts := make([]*Mount, len(st.flag.StringValues))
for _, str := range st.flag.StringValues { for i, str := range st.flag.StringValues {
m, err := parseMount(str, expander) m, err := parseMount(str, expander)
if err != nil { if err != nil {
return err return err
} }
mounts = append(mounts, m) mounts[i] = m
} }
st.mounts = mounts st.mounts = mounts
return nil return nil
@@ -123,14 +122,16 @@ type Mount struct {
CacheID string CacheID string
CacheSharing ShareMode CacheSharing ShareMode
Required bool Required bool
// Env optionally specifies the name of the environment variable for a secret.
// A pointer to an empty value uses the default
Env *string
Mode *uint64 Mode *uint64
UID *uint64 UID *uint64
GID *uint64 GID *uint64
} }
func parseMount(val string, expander SingleWordExpander) (*Mount, error) { func parseMount(val string, expander SingleWordExpander) (*Mount, error) {
csvReader := csv.NewReader(strings.NewReader(val)) fields, err := csvvalue.Fields(val, nil)
fields, err := csvReader.Read()
if err != nil { if err != nil {
return nil, errors.Wrap(err, "failed to parse csv mounts") return nil, errors.Wrap(err, "failed to parse csv mounts")
} }
@@ -176,9 +177,7 @@ func parseMount(val string, expander SingleWordExpander) (*Mount, error) {
return nil, err return nil, err
} }
} else if key == "from" { } else if key == "from" {
if matched, err := regexp.MatchString(`\$.`, value); err != nil { //nolint if idx := strings.IndexByte(value, '$'); idx != -1 && idx != len(value)-1 {
return nil, err
} else if matched {
return nil, errors.Errorf("'%s' doesn't support variable expansion, define alias stage instead", key) return nil, errors.Errorf("'%s' doesn't support variable expansion, define alias stage instead", key)
} }
} else { } else {
@@ -256,9 +255,11 @@ func parseMount(val string, expander SingleWordExpander) (*Mount, error) {
return nil, errors.Errorf("invalid value %s for gid", value) return nil, errors.Errorf("invalid value %s for gid", value)
} }
m.GID = &gid m.GID = &gid
case "env":
m.Env = &value
default: default:
allKeys := []string{ allKeys := []string{
"type", "from", "source", "target", "readonly", "id", "sharing", "required", "mode", "uid", "gid", "src", "dst", "ro", "rw", "readwrite", "type", "from", "source", "target", "readonly", "id", "sharing", "required", "size", "mode", "uid", "gid", "src", "dst", "destination", "ro", "rw", "readwrite", "env",
} }
return nil, suggest.WrapError(errors.Errorf("unexpected key '%s' in '%s'", key, field), key, allKeys, true) return nil, suggest.WrapError(errors.Errorf("unexpected key '%s' in '%s'", key, field), key, allKeys, true)
} }

View File

@@ -18,7 +18,7 @@ func errNotJSON(command, original string) error {
// double backslash and a [] pair. No, this is not perfect, but it doesn't // double backslash and a [] pair. No, this is not perfect, but it doesn't
// have to be. It's simply a hint to make life a little easier. // have to be. It's simply a hint to make life a little easier.
extra := "" extra := ""
original = filepath.FromSlash(strings.ToLower(strings.Replace(strings.ToLower(original), strings.ToLower(command)+" ", "", -1))) original = filepath.FromSlash(strings.ToLower(strings.ReplaceAll(strings.ToLower(original), strings.ToLower(command)+" ", "")))
if len(regexp.MustCompile(`"[a-z]:\\.*`).FindStringSubmatch(original)) > 0 && if len(regexp.MustCompile(`"[a-z]:\\.*`).FindStringSubmatch(original)) > 0 &&
!strings.Contains(original, `\\`) && !strings.Contains(original, `\\`) &&
strings.Contains(original, "[") && strings.Contains(original, "[") &&

View File

@@ -12,11 +12,11 @@ import (
"strings" "strings"
"time" "time"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/strslice"
"github.com/moby/buildkit/frontend/dockerfile/command" "github.com/moby/buildkit/frontend/dockerfile/command"
"github.com/moby/buildkit/frontend/dockerfile/linter"
"github.com/moby/buildkit/frontend/dockerfile/parser" "github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/util/suggest" "github.com/moby/buildkit/util/suggest"
dockerspec "github.com/moby/docker-image-spec/specs-go/v1"
"github.com/pkg/errors" "github.com/pkg/errors"
) )
@@ -66,16 +66,24 @@ func newParseRequestFromNode(node *parser.Node) parseRequest {
} }
} }
// ParseInstruction converts an AST to a typed instruction (either a command or a build stage beginning when encountering a `FROM` statement)
func ParseInstruction(node *parser.Node) (v interface{}, err error) { func ParseInstruction(node *parser.Node) (v interface{}, err error) {
return ParseInstructionWithLinter(node, nil)
}
// ParseInstruction converts an AST to a typed instruction (either a command or a build stage beginning when encountering a `FROM` statement)
func ParseInstructionWithLinter(node *parser.Node, lint *linter.Linter) (v interface{}, err error) {
defer func() { defer func() {
if err != nil {
err = parser.WithLocation(err, node.Location()) err = parser.WithLocation(err, node.Location())
}
}() }()
req := newParseRequestFromNode(node) req := newParseRequestFromNode(node)
switch strings.ToLower(node.Value) { switch strings.ToLower(node.Value) {
case command.Env: case command.Env:
return parseEnv(req) return parseEnv(req)
case command.Maintainer: case command.Maintainer:
msg := linter.RuleMaintainerDeprecated.Format()
lint.Run(&linter.RuleMaintainerDeprecated, node.Location(), msg)
return parseMaintainer(req) return parseMaintainer(req)
case command.Label: case command.Label:
return parseLabel(req) return parseLabel(req)
@@ -84,7 +92,22 @@ func ParseInstruction(node *parser.Node) (v interface{}, err error) {
case command.Copy: case command.Copy:
return parseCopy(req) return parseCopy(req)
case command.From: case command.From:
return parseFrom(req) if !isLowerCaseStageName(req.args) {
msg := linter.RuleStageNameCasing.Format(req.args[2])
lint.Run(&linter.RuleStageNameCasing, node.Location(), msg)
}
if !doesFromCaseMatchAsCase(req) {
msg := linter.RuleFromAsCasing.Format(req.command, req.args[1])
lint.Run(&linter.RuleFromAsCasing, node.Location(), msg)
}
fromCmd, err := parseFrom(req)
if err != nil {
return nil, err
}
if fromCmd.Name != "" {
validateDefinitionDescription("FROM", []string{fromCmd.Name}, node.PrevComment, node.Location(), lint)
}
return fromCmd, nil
case command.Onbuild: case command.Onbuild:
return parseOnBuild(req) return parseOnBuild(req)
case command.Workdir: case command.Workdir:
@@ -106,7 +129,16 @@ func ParseInstruction(node *parser.Node) (v interface{}, err error) {
case command.StopSignal: case command.StopSignal:
return parseStopSignal(req) return parseStopSignal(req)
case command.Arg: case command.Arg:
return parseArg(req) argCmd, err := parseArg(req)
if err != nil {
return nil, err
}
argKeys := []string{}
for _, arg := range argCmd.Args {
argKeys = append(argKeys, arg.Key)
}
validateDefinitionDescription("ARG", argKeys, node.PrevComment, node.Location(), lint)
return argCmd, nil
case command.Shell: case command.Shell:
return parseShell(req) return parseShell(req)
} }
@@ -150,9 +182,9 @@ func (e *parseError) Unwrap() error {
// Parse a Dockerfile into a collection of buildable stages. // Parse a Dockerfile into a collection of buildable stages.
// metaArgs is a collection of ARG instructions that occur before the first FROM. // metaArgs is a collection of ARG instructions that occur before the first FROM.
func Parse(ast *parser.Node) (stages []Stage, metaArgs []ArgCommand, err error) { func Parse(ast *parser.Node, lint *linter.Linter) (stages []Stage, metaArgs []ArgCommand, err error) {
for _, n := range ast.Children { for _, n := range ast.Children {
cmd, err := ParseInstruction(n) cmd, err := ParseInstructionWithLinter(n, lint)
if err != nil { if err != nil {
return nil, nil, &parseError{inner: err, node: n} return nil, nil, &parseError{inner: err, node: n}
} }
@@ -183,18 +215,17 @@ func parseKvps(args []string, cmdName string) (KeyValuePairs, error) {
if len(args) == 0 { if len(args) == 0 {
return nil, errAtLeastOneArgument(cmdName) return nil, errAtLeastOneArgument(cmdName)
} }
if len(args)%2 != 0 { if len(args)%3 != 0 {
// should never get here, but just in case // should never get here, but just in case
return nil, errTooManyArguments(cmdName) return nil, errTooManyArguments(cmdName)
} }
var res KeyValuePairs var res KeyValuePairs
for j := 0; j < len(args); j += 2 { for j := 0; j < len(args); j += 3 {
if len(args[j]) == 0 { if len(args[j]) == 0 {
return nil, errBlankCommandNames(cmdName) return nil, errBlankCommandNames(cmdName)
} }
name := args[j] name, value, delim := args[j], args[j+1], args[j+2]
value := args[j+1] res = append(res, KeyValuePair{Key: name, Value: value, NoDelim: delim == ""})
res = append(res, KeyValuePair{Key: name, Value: value})
} }
return res, nil return res, nil
} }
@@ -383,6 +414,7 @@ func parseFrom(req parseRequest) (*Stage, error) {
code := strings.TrimSpace(req.original) code := strings.TrimSpace(req.original)
return &Stage{ return &Stage{
BaseName: req.args[0], BaseName: req.args[0],
OrigCmd: req.command,
Name: stageName, Name: stageName,
SourceCode: code, SourceCode: code,
Commands: []Command{}, Commands: []Command{},
@@ -450,7 +482,7 @@ func parseWorkdir(req parseRequest) (*WorkdirCommand, error) {
}, nil }, nil
} }
func parseShellDependentCommand(req parseRequest, command string, emptyAsNil bool) (ShellDependantCmdLine, error) { func parseShellDependentCommand(req parseRequest, emptyAsNil bool) (ShellDependantCmdLine, error) {
var files []ShellInlineFile var files []ShellInlineFile
for _, heredoc := range req.heredocs { for _, heredoc := range req.heredocs {
file := ShellInlineFile{ file := ShellInlineFile{
@@ -462,12 +494,11 @@ func parseShellDependentCommand(req parseRequest, command string, emptyAsNil boo
} }
args := handleJSONArgs(req.args, req.attributes) args := handleJSONArgs(req.args, req.attributes)
cmd := strslice.StrSlice(args) if emptyAsNil && len(args) == 0 {
if emptyAsNil && len(cmd) == 0 { args = nil
cmd = nil
} }
return ShellDependantCmdLine{ return ShellDependantCmdLine{
CmdLine: cmd, CmdLine: args,
Files: files, Files: files,
PrependShell: !req.attributes["json"], PrependShell: !req.attributes["json"],
}, nil }, nil
@@ -487,7 +518,7 @@ func parseRun(req parseRequest) (*RunCommand, error) {
} }
cmd.FlagsUsed = req.flags.Used() cmd.FlagsUsed = req.flags.Used()
cmdline, err := parseShellDependentCommand(req, "RUN", false) cmdline, err := parseShellDependentCommand(req, false)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -509,7 +540,7 @@ func parseCmd(req parseRequest) (*CmdCommand, error) {
return nil, err return nil, err
} }
cmdline, err := parseShellDependentCommand(req, "CMD", false) cmdline, err := parseShellDependentCommand(req, false)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -525,7 +556,7 @@ func parseEntrypoint(req parseRequest) (*EntrypointCommand, error) {
return nil, err return nil, err
} }
cmdline, err := parseShellDependentCommand(req, "ENTRYPOINT", true) cmdline, err := parseShellDependentCommand(req, true)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -550,8 +581,10 @@ func parseOptInterval(f *Flag) (time.Duration, error) {
if d == 0 { if d == 0 {
return 0, nil return 0, nil
} }
if d < container.MinimumDuration {
return 0, errors.Errorf("Interval %#v cannot be less than %s", f.name, container.MinimumDuration) const minimumDuration = time.Millisecond
if d < minimumDuration {
return 0, errors.Errorf("Interval %#v cannot be less than %s", f.name, minimumDuration)
} }
return d, nil return d, nil
} }
@@ -569,12 +602,11 @@ func parseHealthcheck(req parseRequest) (*HealthCheckCommand, error) {
if len(args) != 0 { if len(args) != 0 {
return nil, errors.New("HEALTHCHECK NONE takes no arguments") return nil, errors.New("HEALTHCHECK NONE takes no arguments")
} }
test := strslice.StrSlice{typ} cmd.Health = &dockerspec.HealthcheckConfig{
cmd.Health = &container.HealthConfig{ Test: []string{typ},
Test: test,
} }
} else { } else {
healthcheck := container.HealthConfig{} healthcheck := dockerspec.HealthcheckConfig{}
flInterval := req.flags.AddString("interval", "") flInterval := req.flags.AddString("interval", "")
flTimeout := req.flags.AddString("timeout", "") flTimeout := req.flags.AddString("timeout", "")
@@ -597,7 +629,7 @@ func parseHealthcheck(req parseRequest) (*HealthCheckCommand, error) {
typ = "CMD-SHELL" typ = "CMD-SHELL"
} }
healthcheck.Test = strslice.StrSlice(append([]string{typ}, cmdSlice...)) healthcheck.Test = append([]string{typ}, cmdSlice...)
default: default:
return nil, errors.Errorf("Unknown type %#v in HEALTHCHECK (try CMD)", typ) return nil, errors.Errorf("Unknown type %#v in HEALTHCHECK (try CMD)", typ)
} }
@@ -761,7 +793,7 @@ func parseShell(req parseRequest) (*ShellCommand, error) {
// SHELL ["powershell", "-command"] // SHELL ["powershell", "-command"]
return &ShellCommand{ return &ShellCommand{
Shell: strslice.StrSlice(shellSlice), Shell: shellSlice,
withNameAndCode: newWithNameAndCode(req), withNameAndCode: newWithNameAndCode(req),
}, nil }, nil
default: default:
@@ -815,3 +847,49 @@ func allInstructionNames() []string {
} }
return out return out
} }
func isLowerCaseStageName(cmdArgs []string) bool {
if len(cmdArgs) != 3 {
return true
}
stageName := cmdArgs[2]
return stageName == strings.ToLower(stageName)
}
func doesFromCaseMatchAsCase(req parseRequest) bool {
if len(req.args) < 3 {
return true
}
// consistent casing for the command is handled elsewhere.
// If the command is not consistent, there's no need to
// add an additional lint warning for the `as` argument.
fromHasLowerCasing := req.command == strings.ToLower(req.command)
fromHasUpperCasing := req.command == strings.ToUpper(req.command)
if !fromHasLowerCasing && !fromHasUpperCasing {
return true
}
if fromHasLowerCasing {
return req.args[1] == strings.ToLower(req.args[1])
}
return req.args[1] == strings.ToUpper(req.args[1])
}
func validateDefinitionDescription(instruction string, argKeys []string, descComments []string, location []parser.Range, lint *linter.Linter) {
if len(descComments) == 0 || len(argKeys) == 0 {
return
}
descCommentParts := strings.Split(descComments[len(descComments)-1], " ")
for _, key := range argKeys {
if key == descCommentParts[0] {
return
}
}
exampleKey := argKeys[0]
if len(argKeys) > 1 {
exampleKey = "<arg_key>"
}
msg := linter.RuleInvalidDefinitionDescription.Format(instruction, exampleKey)
lint.Run(&linter.RuleInvalidDefinitionDescription, location, msg)
}

View File

@@ -0,0 +1,189 @@
package linter
import (
	"fmt"
	"sort"
	"strconv"
	"strings"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
	"github.com/pkg/errors"
)
// Config holds the user-facing lint configuration used by New to build a
// Linter, typically produced by ParseLintOptions.
type Config struct {
	ExperimentalAll bool // enable every experimental rule
	ExperimentalRules []string // experimental rule names to enable individually
	ReturnAsError bool // make Linter.Error return non-nil when any rule fired
	SkipAll bool // skip every stable (non-experimental) rule
	SkipRules []string // stable rule names to skip individually
	Warn LintWarnFunc // callback invoked once per rule violation
}
// Linter evaluates lint rules, honoring the skip/experimental opt-in and
// opt-out sets, and records which rules have fired.
type Linter struct {
	CalledRules []string // names of rules that fired, in call order (may contain duplicates)
	ExperimentalAll bool // run all experimental rules
	ExperimentalRules map[string]struct{} // experimental rules enabled by name
	ReturnAsError bool // Error returns non-nil when any rule fired
	SkipAll bool // skip all stable rules
	SkippedRules map[string]struct{} // stable rules disabled by name
	Warn LintWarnFunc // warning sink; a nil Warn disables linting entirely
}
// New builds a Linter from config, converting the rule-name slices into
// set lookups. config must be non-nil; config.Warn may be nil, in which
// case Run becomes a no-op.
func New(config *Config) *Linter {
	lc := &Linter{
		CalledRules:       []string{},
		SkipAll:           config.SkipAll,
		ExperimentalAll:   config.ExperimentalAll,
		ReturnAsError:     config.ReturnAsError,
		Warn:              config.Warn,
		SkippedRules:      make(map[string]struct{}, len(config.SkipRules)),
		ExperimentalRules: make(map[string]struct{}, len(config.ExperimentalRules)),
	}
	for _, name := range config.SkipRules {
		lc.SkippedRules[name] = struct{}{}
	}
	for _, name := range config.ExperimentalRules {
		lc.ExperimentalRules[name] = struct{}{}
	}
	return lc
}
// Run evaluates a single rule at the given source location. It is a no-op
// when the linter or its warning sink is nil, when the rule is deprecated,
// when an experimental rule has not been opted into, or when a stable rule
// has been skipped. txt, if given, overrides the rule's default description
// in the emitted warning.
func (lc *Linter) Run(rule LinterRuleI, location []parser.Range, txt ...string) {
	if lc == nil || lc.Warn == nil || rule.IsDeprecated() {
		return
	}
	name := rule.RuleName()
	if rule.IsExperimental() {
		// Experimental rules are opt-in.
		if _, enabled := lc.ExperimentalRules[name]; !enabled && !lc.ExperimentalAll {
			return
		}
	} else {
		// Stable rules are opt-out.
		if _, skipped := lc.SkippedRules[name]; skipped || lc.SkipAll {
			return
		}
	}
	lc.CalledRules = append(lc.CalledRules, name)
	rule.Run(lc.Warn, location, txt...)
}
// Error returns a non-nil error naming every violated rule when the linter
// was configured with ReturnAsError and at least one rule fired; otherwise
// it returns nil. It is safe to call on a nil receiver.
func (lc *Linter) Error() error {
	if lc == nil || !lc.ReturnAsError {
		return nil
	}
	if len(lc.CalledRules) == 0 {
		return nil
	}
	// Deduplicate fired rules via a set, then sort the names so the error
	// message is deterministic — map iteration order is randomized, and
	// previously the rule list came out in a different order on each run.
	uniqueRules := map[string]struct{}{}
	for _, r := range lc.CalledRules {
		uniqueRules[r] = struct{}{}
	}
	rules := make([]string, 0, len(uniqueRules))
	for r := range uniqueRules {
		rules = append(rules, r)
	}
	sort.Strings(rules)
	return errors.Errorf("lint violation found for rules: %s", strings.Join(rules, ", "))
}
// LinterRuleI is the interface satisfied by every lint rule, letting the
// Linter dispatch rules without knowing their concrete, generic Format
// signature (see LinterRule).
type LinterRuleI interface {
	RuleName() string
	Run(warn LintWarnFunc, location []parser.Range, txt ...string)
	IsDeprecated() bool
	IsExperimental() bool
}
// LinterRule describes a single lint rule. F is the rule-specific message
// formatting function type; each rule definition supplies a Format closure
// of that type to build its user-facing warning text.
type LinterRule[F any] struct {
	Name string // stable identifier used in check directives, e.g. "StageNameCasing"
	Description string // default warning text when Run is given no message
	Deprecated bool // deprecated rules are never run
	Experimental bool // experimental rules require explicit opt-in
	URL string // documentation link for the rule
	Format F // builds the rule-specific warning message
}
// RuleName returns the rule's stable identifier.
func (rule *LinterRule[F]) RuleName() string {
	return rule.Name
}
// Run emits the rule's warning via warn, joining txt into the short
// message and falling back to the rule description when txt is empty.
func (rule *LinterRule[F]) Run(warn LintWarnFunc, location []parser.Range, txt ...string) {
	if len(txt) == 0 {
		txt = []string{rule.Description}
	}
	short := strings.Join(txt, " ")
	warn(rule.Name, rule.Description, rule.URL, short, location)
}
// IsDeprecated reports whether the rule has been retired and must not run.
func (rule *LinterRule[F]) IsDeprecated() bool {
	return rule.Deprecated
}
// IsExperimental reports whether the rule requires explicit opt-in.
func (rule *LinterRule[F]) IsExperimental() bool {
	return rule.Experimental
}
// LintFormatShort renders a one-line lint message as "<rule>: <msg>",
// appending " (line N)" when a positive line number is supplied.
func LintFormatShort(rulename, msg string, line int) string {
	out := rulename + ": " + msg
	if line > 0 {
		out += fmt.Sprintf(" (line %d)", line)
	}
	return out
}
// LintWarnFunc receives one lint warning: the rule name, its description,
// a documentation URL, the formatted short message, and the source location.
type LintWarnFunc func(rulename, description, url, fmtmsg string, location []parser.Range)
// ParseLintOptions parses a check-directive option string of the form
// "skip=a,b;experimental=all;error=true" into a Config. Recognized keys are
// "skip" and "experimental" (a comma-separated rule list or "all") and
// "error" (a boolean). An empty string yields a zero-value Config. Unknown
// keys or segments without "=" produce an error.
func ParseLintOptions(checkStr string) (*Config, error) {
	checkStr = strings.TrimSpace(checkStr)
	if checkStr == "" {
		return &Config{}, nil
	}
	// Split on every ";" rather than SplitN(…, 3): with SplitN a fourth
	// segment was silently folded into the value of the third, yielding a
	// misleading parse error instead of flagging the extra segment itself.
	parts := strings.Split(checkStr, ";")
	var skipSet, experimentalSet []string
	var errorOnWarn, skipAll, experimentalAll bool
	for _, p := range parts {
		k, v, ok := strings.Cut(p, "=")
		if !ok {
			return nil, errors.Errorf("invalid check option %q", p)
		}
		k = strings.TrimSpace(k)
		switch k {
		case "skip":
			v = strings.TrimSpace(v)
			if v == "all" {
				skipAll = true
			} else {
				skipSet = strings.Split(v, ",")
				for i, rule := range skipSet {
					skipSet[i] = strings.TrimSpace(rule)
				}
			}
		case "experimental":
			v = strings.TrimSpace(v)
			if v == "all" {
				experimentalAll = true
			} else {
				experimentalSet = strings.Split(v, ",")
				for i, rule := range experimentalSet {
					experimentalSet[i] = strings.TrimSpace(rule)
				}
			}
		case "error":
			v, err := strconv.ParseBool(strings.TrimSpace(v))
			if err != nil {
				return nil, errors.Wrapf(err, "failed to parse check option %q", p)
			}
			errorOnWarn = v
		default:
			return nil, errors.Errorf("invalid check option %q", k)
		}
	}
	return &Config{
		ExperimentalAll:   experimentalAll,
		ExperimentalRules: experimentalSet,
		SkipRules:         skipSet,
		SkipAll:           skipAll,
		ReturnAsError:     errorOnWarn,
	}, nil
}

View File

@@ -0,0 +1,177 @@
package linter
import (
"fmt"
)
// Lint rule definitions. Each rule pairs a stable Name (referenced by check
// directives such as `skip=<Name>`) with a Format function that builds the
// user-facing warning message. Rules marked Experimental must be explicitly
// opted into via the "experimental" check option.
var (
	RuleStageNameCasing = LinterRule[func(string) string]{
		Name: "StageNameCasing",
		Description: "Stage names should be lowercase",
		URL: "https://docs.docker.com/go/dockerfile/rule/stage-name-casing/",
		Format: func(stageName string) string {
			return fmt.Sprintf("Stage name '%s' should be lowercase", stageName)
		},
	}
	RuleFromAsCasing = LinterRule[func(string, string) string]{
		Name: "FromAsCasing",
		Description: "The 'as' keyword should match the case of the 'from' keyword",
		URL: "https://docs.docker.com/go/dockerfile/rule/from-as-casing/",
		Format: func(from, as string) string {
			return fmt.Sprintf("'%s' and '%s' keywords' casing do not match", as, from)
		},
	}
	RuleNoEmptyContinuation = LinterRule[func() string]{
		Name: "NoEmptyContinuation",
		Description: "Empty continuation lines will become errors in a future release",
		URL: "https://docs.docker.com/go/dockerfile/rule/no-empty-continuation/",
		Format: func() string {
			return "Empty continuation line"
		},
	}
	RuleConsistentInstructionCasing = LinterRule[func(string, string) string]{
		Name: "ConsistentInstructionCasing",
		Description: "All commands within the Dockerfile should use the same casing (either upper or lower)",
		URL: "https://docs.docker.com/go/dockerfile/rule/consistent-instruction-casing/",
		Format: func(violatingCommand, correctCasing string) string {
			return fmt.Sprintf("Command '%s' should match the case of the command majority (%s)", violatingCommand, correctCasing)
		},
	}
	RuleDuplicateStageName = LinterRule[func(string) string]{
		Name: "DuplicateStageName",
		Description: "Stage names should be unique",
		URL: "https://docs.docker.com/go/dockerfile/rule/duplicate-stage-name/",
		Format: func(stageName string) string {
			return fmt.Sprintf("Duplicate stage name %q, stage names should be unique", stageName)
		},
	}
	RuleReservedStageName = LinterRule[func(string) string]{
		Name: "ReservedStageName",
		Description: "Reserved words should not be used as stage names",
		URL: "https://docs.docker.com/go/dockerfile/rule/reserved-stage-name/",
		Format: func(reservedStageName string) string {
			return fmt.Sprintf("Stage name should not use the same name as reserved stage %q", reservedStageName)
		},
	}
	RuleJSONArgsRecommended = LinterRule[func(instructionName string) string]{
		Name: "JSONArgsRecommended",
		Description: "JSON arguments recommended for ENTRYPOINT/CMD to prevent unintended behavior related to OS signals",
		URL: "https://docs.docker.com/go/dockerfile/rule/json-args-recommended/",
		Format: func(instructionName string) string {
			return fmt.Sprintf("JSON arguments recommended for %s to prevent unintended behavior related to OS signals", instructionName)
		},
	}
	RuleMaintainerDeprecated = LinterRule[func() string]{
		Name: "MaintainerDeprecated",
		Description: "The MAINTAINER instruction is deprecated, use a label instead to define an image author",
		URL: "https://docs.docker.com/go/dockerfile/rule/maintainer-deprecated/",
		Format: func() string {
			return "Maintainer instruction is deprecated in favor of using label"
		},
	}
	RuleUndefinedArgInFrom = LinterRule[func(string, string) string]{
		Name: "UndefinedArgInFrom",
		Description: "FROM command must use declared ARGs",
		URL: "https://docs.docker.com/go/dockerfile/rule/undefined-arg-in-from/",
		Format: func(baseArg, suggest string) string {
			out := fmt.Sprintf("FROM argument '%s' is not declared", baseArg)
			if suggest != "" {
				out += fmt.Sprintf(" (did you mean %s?)", suggest)
			}
			return out
		},
	}
	RuleWorkdirRelativePath = LinterRule[func(workdir string) string]{
		Name: "WorkdirRelativePath",
		Description: "Relative workdir without an absolute workdir declared within the build can have unexpected results if the base image changes",
		URL: "https://docs.docker.com/go/dockerfile/rule/workdir-relative-path/",
		Format: func(workdir string) string {
			return fmt.Sprintf("Relative workdir %q can have unexpected results if the base image changes", workdir)
		},
	}
	RuleUndefinedVar = LinterRule[func(string, string) string]{
		Name: "UndefinedVar",
		Description: "Variables should be defined before their use",
		URL: "https://docs.docker.com/go/dockerfile/rule/undefined-var/",
		Format: func(arg, suggest string) string {
			out := fmt.Sprintf("Usage of undefined variable '$%s'", arg)
			if suggest != "" {
				out += fmt.Sprintf(" (did you mean $%s?)", suggest)
			}
			return out
		},
	}
	RuleMultipleInstructionsDisallowed = LinterRule[func(instructionName string) string]{
		Name: "MultipleInstructionsDisallowed",
		Description: "Multiple instructions of the same type should not be used in the same stage",
		URL: "https://docs.docker.com/go/dockerfile/rule/multiple-instructions-disallowed/",
		Format: func(instructionName string) string {
			return fmt.Sprintf("Multiple %s instructions should not be used in the same stage because only the last one will be used", instructionName)
		},
	}
	RuleLegacyKeyValueFormat = LinterRule[func(cmdName string) string]{
		Name: "LegacyKeyValueFormat",
		Description: "Legacy key/value format with whitespace separator should not be used",
		URL: "https://docs.docker.com/go/dockerfile/rule/legacy-key-value-format/",
		Format: func(cmdName string) string {
			return fmt.Sprintf("\"%s key=value\" should be used instead of legacy \"%s key value\" format", cmdName, cmdName)
		},
	}
	// NOTE(review): unlike the other rules, no URL is set here — confirm
	// whether a documentation page exists for this rule.
	RuleInvalidBaseImagePlatform = LinterRule[func(string, string, string) string]{
		Name: "InvalidBaseImagePlatform",
		Description: "Base image platform does not match expected target platform",
		Format: func(image, expected, actual string) string {
			return fmt.Sprintf("Base image %s was pulled with platform %q, expected %q for current build", image, actual, expected)
		},
	}
	RuleRedundantTargetPlatform = LinterRule[func(string) string]{
		Name: "RedundantTargetPlatform",
		Description: "Setting platform to predefined $TARGETPLATFORM in FROM is redundant as this is the default behavior",
		URL: "https://docs.docker.com/go/dockerfile/rule/redundant-target-platform/",
		Format: func(platformVar string) string {
			return fmt.Sprintf("Setting platform to predefined %s in FROM is redundant as this is the default behavior", platformVar)
		},
	}
	RuleSecretsUsedInArgOrEnv = LinterRule[func(string, string) string]{
		Name: "SecretsUsedInArgOrEnv",
		Description: "Sensitive data should not be used in the ARG or ENV commands",
		URL: "https://docs.docker.com/go/dockerfile/rule/secrets-used-in-arg-or-env/",
		Format: func(instruction, secretKey string) string {
			return fmt.Sprintf("Do not use ARG or ENV instructions for sensitive data (%s %q)", instruction, secretKey)
		},
	}
	RuleInvalidDefaultArgInFrom = LinterRule[func(string) string]{
		Name: "InvalidDefaultArgInFrom",
		Description: "Default value for global ARG results in an empty or invalid base image name",
		URL: "https://docs.docker.com/go/dockerfile/rule/invalid-default-arg-in-from/",
		Format: func(baseName string) string {
			return fmt.Sprintf("Default value for ARG %v results in empty or invalid base image name", baseName)
		},
	}
	RuleFromPlatformFlagConstDisallowed = LinterRule[func(string) string]{
		Name: "FromPlatformFlagConstDisallowed",
		Description: "FROM --platform flag should not use a constant value",
		URL: "https://docs.docker.com/go/dockerfile/rule/from-platform-flag-const-disallowed/",
		Format: func(platform string) string {
			return fmt.Sprintf("FROM --platform flag should not use constant value %q", platform)
		},
	}
	// Experimental: opt-in only.
	RuleCopyIgnoredFile = LinterRule[func(string, string) string]{
		Name: "CopyIgnoredFile",
		Description: "Attempting to Copy file that is excluded by .dockerignore",
		URL: "https://docs.docker.com/go/dockerfile/rule/copy-ignored-file/",
		Format: func(cmd, file string) string {
			return fmt.Sprintf("Attempting to %s file %q that is excluded by .dockerignore", cmd, file)
		},
		Experimental: true,
	}
	// Experimental: opt-in only.
	RuleInvalidDefinitionDescription = LinterRule[func(string, string) string]{
		Name: "InvalidDefinitionDescription",
		Description: "Comment for build stage or argument should follow the format: `# <arg/stage name> <description>`. If this is not intended to be a description comment, add an empty line or comment between the instruction and the comment.",
		URL: "https://docs.docker.com/go/dockerfile/rule/invalid-definition-description/",
		Format: func(instruction, defName string) string {
			return fmt.Sprintf("Comment for %s should follow the format: `# %s <description>`", instruction, defName)
		},
		Experimental: true,
	}
)

View File

@@ -13,12 +13,14 @@ import (
const ( const (
keySyntax = "syntax" keySyntax = "syntax"
keyCheck = "check"
keyEscape = "escape" keyEscape = "escape"
) )
var validDirectives = map[string]struct{}{ var validDirectives = map[string]struct{}{
keySyntax: {}, keySyntax: {},
keyEscape: {}, keyEscape: {},
keyCheck: {},
} }
type Directive struct { type Directive struct {
@@ -110,6 +112,10 @@ func (d *DirectiveParser) ParseAll(data []byte) ([]*Directive, error) {
// This allows for a flexible range of input formats, and appropriate syntax // This allows for a flexible range of input formats, and appropriate syntax
// selection. // selection.
func DetectSyntax(dt []byte) (string, string, []Range, bool) { func DetectSyntax(dt []byte) (string, string, []Range, bool) {
return ParseDirective(keySyntax, dt)
}
func ParseDirective(key string, dt []byte) (string, string, []Range, bool) {
dt, hadShebang, err := discardShebang(dt) dt, hadShebang, err := discardShebang(dt)
if err != nil { if err != nil {
return "", "", nil, false return "", "", nil, false
@@ -119,42 +125,38 @@ func DetectSyntax(dt []byte) (string, string, []Range, bool) {
line++ line++
} }
// use default directive parser, and search for #syntax= // use default directive parser, and search for #key=
directiveParser := DirectiveParser{line: line} directiveParser := DirectiveParser{line: line}
if syntax, cmdline, loc, ok := detectSyntaxFromParser(dt, directiveParser); ok { if syntax, cmdline, loc, ok := detectDirectiveFromParser(key, dt, directiveParser); ok {
return syntax, cmdline, loc, true return syntax, cmdline, loc, true
} }
// use directive with different comment prefix, and search for //syntax= // use directive with different comment prefix, and search for //key=
directiveParser = DirectiveParser{line: line} directiveParser = DirectiveParser{line: line}
directiveParser.setComment("//") directiveParser.setComment("//")
if syntax, cmdline, loc, ok := detectSyntaxFromParser(dt, directiveParser); ok { if syntax, cmdline, loc, ok := detectDirectiveFromParser(key, dt, directiveParser); ok {
return syntax, cmdline, loc, true return syntax, cmdline, loc, true
} }
// search for possible json directives // use json directive, and search for { "key": "..." }
var directive struct { jsonDirective := map[string]string{}
Syntax string `json:"syntax"` if err := json.Unmarshal(dt, &jsonDirective); err == nil {
} if v, ok := jsonDirective[key]; ok {
if err := json.Unmarshal(dt, &directive); err == nil {
if directive.Syntax != "" {
loc := []Range{{ loc := []Range{{
Start: Position{Line: line}, Start: Position{Line: line},
End: Position{Line: line}, End: Position{Line: line},
}} }}
return directive.Syntax, directive.Syntax, loc, true return v, v, loc, true
} }
} }
return "", "", nil, false return "", "", nil, false
} }
func detectSyntaxFromParser(dt []byte, parser DirectiveParser) (string, string, []Range, bool) { func detectDirectiveFromParser(key string, dt []byte, parser DirectiveParser) (string, string, []Range, bool) {
directives, _ := parser.ParseAll(dt) directives, _ := parser.ParseAll(dt)
for _, d := range directives { for _, d := range directives {
// check for syntax directive before erroring out, since the error if d.Name == key {
// might have occurred *after* the syntax directive
if d.Name == keySyntax {
p, _, _ := strings.Cut(d.Value, " ") p, _, _ := strings.Cut(d.Value, " ")
return p, d.Value, d.Location, true return p, d.Value, d.Location, true
} }

View File

@@ -34,12 +34,24 @@ func withLocation(err error, start, end int) error {
// WithLocation extends an error with a source code location // WithLocation extends an error with a source code location
func WithLocation(err error, location []Range) error { func WithLocation(err error, location []Range) error {
return setLocation(err, location, true)
}
func SetLocation(err error, location []Range) error {
return setLocation(err, location, false)
}
func setLocation(err error, location []Range, add bool) error {
if err == nil { if err == nil {
return nil return nil
} }
var el *ErrorLocation var el *ErrorLocation
if errors.As(err, &el) { if errors.As(err, &el) {
if add {
el.Locations = append(el.Locations, location) el.Locations = append(el.Locations, location)
} else {
el.Locations = [][]Range{location}
}
return err return err
} }
return stack.Enable(&ErrorLocation{ return stack.Enable(&ErrorLocation{

View File

@@ -17,6 +17,7 @@ import (
var ( var (
errDockerfileNotStringArray = errors.New("when using JSON array syntax, arrays must be comprised of strings only") errDockerfileNotStringArray = errors.New("when using JSON array syntax, arrays must be comprised of strings only")
errDockerfileNotJSONArray = errors.New("not a JSON array")
) )
const ( const (
@@ -58,11 +59,11 @@ func parseWords(rest string, d *directives) []string {
words := []string{} words := []string{}
phase := inSpaces phase := inSpaces
word := ""
quote := '\000' quote := '\000'
blankOK := false blankOK := false
var ch rune var ch rune
var chWidth int var chWidth int
var sbuilder strings.Builder
for pos := 0; pos <= len(rest); pos += chWidth { for pos := 0; pos <= len(rest); pos += chWidth {
if pos != len(rest) { if pos != len(rest) {
@@ -79,18 +80,18 @@ func parseWords(rest string, d *directives) []string {
phase = inWord // found it, fall through phase = inWord // found it, fall through
} }
if (phase == inWord || phase == inQuote) && (pos == len(rest)) { if (phase == inWord || phase == inQuote) && (pos == len(rest)) {
if blankOK || len(word) > 0 { if blankOK || sbuilder.Len() > 0 {
words = append(words, word) words = append(words, sbuilder.String())
} }
break break
} }
if phase == inWord { if phase == inWord {
if unicode.IsSpace(ch) { if unicode.IsSpace(ch) {
phase = inSpaces phase = inSpaces
if blankOK || len(word) > 0 { if blankOK || sbuilder.Len() > 0 {
words = append(words, word) words = append(words, sbuilder.String())
} }
word = "" sbuilder.Reset()
blankOK = false blankOK = false
continue continue
} }
@@ -106,11 +107,11 @@ func parseWords(rest string, d *directives) []string {
// If we're not quoted and we see an escape token, then always just // If we're not quoted and we see an escape token, then always just
// add the escape token plus the char to the word, even if the char // add the escape token plus the char to the word, even if the char
// is a quote. // is a quote.
word += string(ch) sbuilder.WriteRune(ch)
pos += chWidth pos += chWidth
ch, chWidth = utf8.DecodeRuneInString(rest[pos:]) ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
} }
word += string(ch) sbuilder.WriteRune(ch)
continue continue
} }
if phase == inQuote { if phase == inQuote {
@@ -124,10 +125,10 @@ func parseWords(rest string, d *directives) []string {
continue // just skip the escape token at end continue // just skip the escape token at end
} }
pos += chWidth pos += chWidth
word += string(ch) sbuilder.WriteRune(ch)
ch, chWidth = utf8.DecodeRuneInString(rest[pos:]) ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
} }
word += string(ch) sbuilder.WriteRune(ch)
} }
} }
@@ -154,7 +155,7 @@ func parseNameVal(rest string, key string, d *directives) (*Node, error) {
if len(parts) < 2 { if len(parts) < 2 {
return nil, errors.Errorf("%s must have two arguments", key) return nil, errors.Errorf("%s must have two arguments", key)
} }
return newKeyValueNode(parts[0], parts[1]), nil return newKeyValueNode(parts[0], parts[1], ""), nil
} }
var rootNode *Node var rootNode *Node
@@ -165,17 +166,20 @@ func parseNameVal(rest string, key string, d *directives) (*Node, error) {
} }
parts := strings.SplitN(word, "=", 2) parts := strings.SplitN(word, "=", 2)
node := newKeyValueNode(parts[0], parts[1]) node := newKeyValueNode(parts[0], parts[1], "=")
rootNode, prevNode = appendKeyValueNode(node, rootNode, prevNode) rootNode, prevNode = appendKeyValueNode(node, rootNode, prevNode)
} }
return rootNode, nil return rootNode, nil
} }
func newKeyValueNode(key, value string) *Node { func newKeyValueNode(key, value, sep string) *Node {
return &Node{ return &Node{
Value: key, Value: key,
Next: &Node{Value: value}, Next: &Node{
Value: value,
Next: &Node{Value: sep},
},
} }
} }
@@ -187,7 +191,9 @@ func appendKeyValueNode(node, rootNode, prevNode *Node) (*Node, *Node) {
prevNode.Next = node prevNode.Next = node
} }
prevNode = node.Next for prevNode = node.Next; prevNode.Next != nil; {
prevNode = prevNode.Next
}
return rootNode, prevNode return rootNode, prevNode
} }
@@ -269,14 +275,14 @@ func parseString(rest string, d *directives) (*Node, map[string]bool, error) {
} }
// parseJSON converts JSON arrays to an AST. // parseJSON converts JSON arrays to an AST.
func parseJSON(rest string, d *directives) (*Node, map[string]bool, error) { func parseJSON(rest string) (*Node, map[string]bool, error) {
rest = strings.TrimLeftFunc(rest, unicode.IsSpace) rest = strings.TrimLeftFunc(rest, unicode.IsSpace)
if !strings.HasPrefix(rest, "[") { if !strings.HasPrefix(rest, "[") {
return nil, nil, errors.Errorf("Error parsing %q as a JSON array", rest) return nil, nil, errDockerfileNotJSONArray
} }
var myJSON []interface{} var myJSON []interface{}
if err := json.NewDecoder(strings.NewReader(rest)).Decode(&myJSON); err != nil { if err := json.Unmarshal([]byte(rest), &myJSON); err != nil {
return nil, nil, err return nil, nil, err
} }
@@ -307,7 +313,7 @@ func parseMaybeJSON(rest string, d *directives) (*Node, map[string]bool, error)
return nil, nil, nil return nil, nil, nil
} }
node, attrs, err := parseJSON(rest, d) node, attrs, err := parseJSON(rest)
if err == nil { if err == nil {
return node, attrs, nil return node, attrs, nil
@@ -325,7 +331,7 @@ func parseMaybeJSON(rest string, d *directives) (*Node, map[string]bool, error)
// so, passes to parseJSON; if not, attempts to parse it as a whitespace // so, passes to parseJSON; if not, attempts to parse it as a whitespace
// delimited string. // delimited string.
func parseMaybeJSONToList(rest string, d *directives) (*Node, map[string]bool, error) { func parseMaybeJSONToList(rest string, d *directives) (*Node, map[string]bool, error) {
node, attrs, err := parseJSON(rest, d) node, attrs, err := parseJSON(rest)
if err == nil { if err == nil {
return node, attrs, nil return node, attrs, nil

View File

@@ -114,7 +114,6 @@ type Heredoc struct {
var ( var (
dispatch map[string]func(string, *directives) (*Node, map[string]bool, error) dispatch map[string]func(string, *directives) (*Node, map[string]bool, error)
reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`) reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`)
reComment = regexp.MustCompile(`^#.*$`)
reHeredoc = regexp.MustCompile(`^(\d*)<<(-?)([^<]*)$`) reHeredoc = regexp.MustCompile(`^(\d*)<<(-?)([^<]*)$`)
reLeadingTabs = regexp.MustCompile(`(?m)^\t+`) reLeadingTabs = regexp.MustCompile(`(?m)^\t+`)
) )
@@ -168,16 +167,17 @@ func (d *directives) setEscapeToken(s string) error {
// possibleParserDirective looks for parser directives, eg '# escapeToken=<char>'. // possibleParserDirective looks for parser directives, eg '# escapeToken=<char>'.
// Parser directives must precede any builder instruction or other comments, // Parser directives must precede any builder instruction or other comments,
// and cannot be repeated. // and cannot be repeated. Returns true if a parser directive was found.
func (d *directives) possibleParserDirective(line string) error { func (d *directives) possibleParserDirective(line []byte) (bool, error) {
directive, err := d.parser.ParseLine([]byte(line)) directive, err := d.parser.ParseLine(line)
if err != nil { if err != nil {
return err return false, err
} }
if directive != nil && directive.Name == keyEscape { if directive != nil && directive.Name == keyEscape {
return d.setEscapeToken(directive.Value) err := d.setEscapeToken(directive.Value)
return err == nil, err
} }
return nil return directive != nil, nil
} }
// newDefaultDirectives returns a new directives structure with the default escapeToken token // newDefaultDirectives returns a new directives structure with the default escapeToken token
@@ -284,6 +284,7 @@ func Parse(rwc io.Reader) (*Result, error) {
scanner.Split(scanLines) scanner.Split(scanLines)
warnings := []Warning{} warnings := []Warning{}
var comments []string var comments []string
buf := &bytes.Buffer{}
var err error var err error
for scanner.Scan() { for scanner.Scan() {
@@ -300,21 +301,29 @@ func Parse(rwc io.Reader) (*Result, error) {
comments = append(comments, comment) comments = append(comments, comment)
} }
} }
bytesRead, err = processLine(d, bytesRead, true) var directiveOk bool
bytesRead, directiveOk, err = processLine(d, bytesRead, true)
// If the line is a directive, strip it from the comments
// so it doesn't get added to the AST.
if directiveOk {
comments = comments[:len(comments)-1]
}
if err != nil { if err != nil {
return nil, withLocation(err, currentLine, 0) return nil, withLocation(err, currentLine, 0)
} }
currentLine++ currentLine++
startLine := currentLine startLine := currentLine
line, isEndOfLine := trimContinuationCharacter(string(bytesRead), d) bytesRead, isEndOfLine := trimContinuationCharacter(bytesRead, d)
if isEndOfLine && line == "" { if isEndOfLine && len(bytesRead) == 0 {
continue continue
} }
buf.Reset()
buf.Write(bytesRead)
var hasEmptyContinuationLine bool var hasEmptyContinuationLine bool
for !isEndOfLine && scanner.Scan() { for !isEndOfLine && scanner.Scan() {
bytesRead, err := processLine(d, scanner.Bytes(), false) bytesRead, _, err := processLine(d, scanner.Bytes(), false)
if err != nil { if err != nil {
return nil, withLocation(err, currentLine, 0) return nil, withLocation(err, currentLine, 0)
} }
@@ -329,16 +338,17 @@ func Parse(rwc io.Reader) (*Result, error) {
continue continue
} }
continuationLine := string(bytesRead) bytesRead, isEndOfLine = trimContinuationCharacter(bytesRead, d)
continuationLine, isEndOfLine = trimContinuationCharacter(continuationLine, d) buf.Write(bytesRead)
line += continuationLine
} }
line := buf.String()
if hasEmptyContinuationLine { if hasEmptyContinuationLine {
warnings = append(warnings, Warning{ warnings = append(warnings, Warning{
Short: "Empty continuation line found in: " + line, Short: "Empty continuation line found in: " + line,
Detail: [][]byte{[]byte("Empty continuation lines will become errors in a future release")}, Detail: [][]byte{[]byte("Empty continuation lines will become errors in a future release")},
URL: "https://github.com/moby/moby/pull/33719", URL: "https://docs.docker.com/go/dockerfile/rule/no-empty-continuation/",
Location: &Range{Start: Position{Line: currentLine}, End: Position{Line: currentLine}}, Location: &Range{Start: Position{Line: currentLine}, End: Position{Line: currentLine}},
}) })
} }
@@ -348,7 +358,7 @@ func Parse(rwc io.Reader) (*Result, error) {
return nil, withLocation(err, startLine, currentLine) return nil, withLocation(err, startLine, currentLine)
} }
if child.canContainHeredoc() { if child.canContainHeredoc() && strings.Contains(line, "<<") {
heredocs, err := heredocsFromLine(line) heredocs, err := heredocsFromLine(line)
if err != nil { if err != nil {
return nil, withLocation(err, startLine, currentLine) return nil, withLocation(err, startLine, currentLine)
@@ -415,7 +425,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
// If there are quotes in one but not the other, then we know that some // If there are quotes in one but not the other, then we know that some
// part of the heredoc word is quoted, so we shouldn't expand the content. // part of the heredoc word is quoted, so we shouldn't expand the content.
shlex.RawQuotes = false shlex.RawQuotes = false
words, err := shlex.ProcessWords(rest, []string{}) words, err := shlex.ProcessWords(rest, emptyEnvs{})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -425,7 +435,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
} }
shlex.RawQuotes = true shlex.RawQuotes = true
wordsRaw, err := shlex.ProcessWords(rest, []string{}) wordsRaw, err := shlex.ProcessWords(rest, emptyEnvs{})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -466,7 +476,7 @@ func heredocsFromLine(line string) ([]Heredoc, error) {
shlex.RawQuotes = true shlex.RawQuotes = true
shlex.RawEscapes = true shlex.RawEscapes = true
shlex.SkipUnsetEnv = true shlex.SkipUnsetEnv = true
words, _ := shlex.ProcessWords(line, []string{}) words, _ := shlex.ProcessWords(line, emptyEnvs{})
var docs []Heredoc var docs []Heredoc
for _, word := range words { for _, word := range words {
@@ -487,7 +497,10 @@ func ChompHeredocContent(src string) string {
} }
func trimComments(src []byte) []byte { func trimComments(src []byte) []byte {
return reComment.ReplaceAll(src, []byte{}) if !isComment(src) {
return src
}
return nil
} }
func trimLeadingWhitespace(src []byte) []byte { func trimLeadingWhitespace(src []byte) []byte {
@@ -501,7 +514,8 @@ func trimNewline(src []byte) []byte {
} }
func isComment(line []byte) bool { func isComment(line []byte) bool {
return reComment.Match(trimLeadingWhitespace(trimNewline(line))) line = trimLeadingWhitespace(line)
return len(line) > 0 && line[0] == '#'
} }
func isEmptyContinuationLine(line []byte) bool { func isEmptyContinuationLine(line []byte) bool {
@@ -510,9 +524,9 @@ func isEmptyContinuationLine(line []byte) bool {
var utf8bom = []byte{0xEF, 0xBB, 0xBF} var utf8bom = []byte{0xEF, 0xBB, 0xBF}
func trimContinuationCharacter(line string, d *directives) (string, bool) { func trimContinuationCharacter(line []byte, d *directives) ([]byte, bool) {
if d.lineContinuationRegex.MatchString(line) { if d.lineContinuationRegex.Match(line) {
line = d.lineContinuationRegex.ReplaceAllString(line, "$1") line = d.lineContinuationRegex.ReplaceAll(line, []byte("$1"))
return line, false return line, false
} }
return line, true return line, true
@@ -520,12 +534,13 @@ func trimContinuationCharacter(line string, d *directives) (string, bool) {
// TODO: remove stripLeftWhitespace after deprecation period. It seems silly // TODO: remove stripLeftWhitespace after deprecation period. It seems silly
// to preserve whitespace on continuation lines. Why is that done? // to preserve whitespace on continuation lines. Why is that done?
func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte, error) { func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte, bool, error) {
token = trimNewline(token) token = trimNewline(token)
if stripLeftWhitespace { if stripLeftWhitespace {
token = trimLeadingWhitespace(token) token = trimLeadingWhitespace(token)
} }
return trimComments(token), d.possibleParserDirective(string(token)) directiveOk, err := d.possibleParserDirective(token)
return trimComments(token), directiveOk, err
} }
// Variation of bufio.ScanLines that preserves the line endings // Variation of bufio.ScanLines that preserves the line endings
@@ -550,3 +565,13 @@ func handleScannerError(err error) error {
return err return err
} }
} }
type emptyEnvs struct{}
func (emptyEnvs) Get(string) (string, bool) {
return "", false
}
func (emptyEnvs) Keys() []string {
return nil
}

View File

@@ -36,7 +36,7 @@ func extractBuilderFlags(line string) (string, []string, error) {
words := []string{} words := []string{}
phase := inSpaces phase := inSpaces
word := "" sbuilder := &strings.Builder{}
quote := '\000' quote := '\000'
blankOK := false blankOK := false
var ch rune var ch rune
@@ -62,13 +62,14 @@ func extractBuilderFlags(line string) (string, []string, error) {
phase = inWord // found something with "--", fall through phase = inWord // found something with "--", fall through
} }
if (phase == inWord || phase == inQuote) && (pos == len(line)) { if (phase == inWord || phase == inQuote) && (pos == len(line)) {
if word != "--" && (blankOK || len(word) > 0) { if word := sbuilder.String(); word != "--" && (blankOK || len(word) > 0) {
words = append(words, word) words = append(words, word)
} }
break break
} }
if phase == inWord { if phase == inWord {
if unicode.IsSpace(ch) { if unicode.IsSpace(ch) {
word := sbuilder.String()
phase = inSpaces phase = inSpaces
if word == "--" { if word == "--" {
return line[pos:], words, nil return line[pos:], words, nil
@@ -76,7 +77,7 @@ func extractBuilderFlags(line string) (string, []string, error) {
if blankOK || len(word) > 0 { if blankOK || len(word) > 0 {
words = append(words, word) words = append(words, word)
} }
word = "" sbuilder.Reset()
blankOK = false blankOK = false
continue continue
} }
@@ -93,7 +94,9 @@ func extractBuilderFlags(line string) (string, []string, error) {
pos++ pos++
ch = rune(line[pos]) ch = rune(line[pos])
} }
word += string(ch) if _, err := sbuilder.WriteRune(ch); err != nil {
return "", nil, err
}
continue continue
} }
if phase == inQuote { if phase == inQuote {
@@ -109,7 +112,9 @@ func extractBuilderFlags(line string) (string, []string, error) {
pos++ pos++
ch = rune(line[pos]) ch = rune(line[pos])
} }
word += string(ch) if _, err := sbuilder.WriteRune(ch); err != nil {
return "", nil, err
}
} }
} }

View File

@@ -9,3 +9,10 @@ package shell
func EqualEnvKeys(from, to string) bool { func EqualEnvKeys(from, to string) bool {
return from == to return from == to
} }
// NormalizeEnvKey returns the key in a normalized form that can be used
// for comparison. On Unix this is a no-op. On Windows this converts the
// key to uppercase.
func NormalizeEnvKey(key string) string {
return key
}

View File

@@ -8,3 +8,10 @@ import "strings"
func EqualEnvKeys(from, to string) bool { func EqualEnvKeys(from, to string) bool {
return strings.EqualFold(from, to) return strings.EqualFold(from, to)
} }
// NormalizeEnvKey returns the key in a normalized form that can be used
// for comparison. On Unix this is a no-op. On Windows this converts the
// key to uppercase.
func NormalizeEnvKey(key string) string {
return strings.ToUpper(key)
}

View File

@@ -4,6 +4,7 @@ import (
"bytes" "bytes"
"fmt" "fmt"
"regexp" "regexp"
"slices"
"strings" "strings"
"text/scanner" "text/scanner"
"unicode" "unicode"
@@ -11,6 +12,11 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
) )
type EnvGetter interface {
Get(string) (string, bool)
Keys() []string
}
// Lex performs shell word splitting and variable expansion. // Lex performs shell word splitting and variable expansion.
// //
// Lex takes a string and an array of env variables and // Lex takes a string and an array of env variables and
@@ -18,12 +24,15 @@ import (
// tokens. Tries to mimic bash shell process. // tokens. Tries to mimic bash shell process.
// It doesn't support all flavors of ${xx:...} formats but new ones can // It doesn't support all flavors of ${xx:...} formats but new ones can
// be added by adding code to the "special ${} format processing" section // be added by adding code to the "special ${} format processing" section
//
// It is not safe to call methods on a Lex instance concurrently.
type Lex struct { type Lex struct {
escapeToken rune escapeToken rune
RawQuotes bool RawQuotes bool
RawEscapes bool RawEscapes bool
SkipProcessQuotes bool SkipProcessQuotes bool
SkipUnsetEnv bool SkipUnsetEnv bool
shellWord shellWord
} }
// NewLex creates a new Lex which uses escapeToken to escape quotes. // NewLex creates a new Lex which uses escapeToken to escape quotes.
@@ -32,10 +41,13 @@ func NewLex(escapeToken rune) *Lex {
} }
// ProcessWord will use the 'env' list of environment variables, // ProcessWord will use the 'env' list of environment variables,
// and replace any env var references in 'word'. // and replace any env var references in 'word'. It will also
func (s *Lex) ProcessWord(word string, env []string) (string, error) { // return variables in word which were not found in the 'env' list,
word, _, err := s.process(word, BuildEnvs(env)) // which is useful in later linting.
return word, err // TODO: rename
func (s *Lex) ProcessWord(word string, env EnvGetter) (string, map[string]struct{}, error) {
result, err := s.process(word, env, true)
return result.Result, result.Unmatched, err
} }
// ProcessWords will use the 'env' list of environment variables, // ProcessWords will use the 'env' list of environment variables,
@@ -45,59 +57,58 @@ func (s *Lex) ProcessWord(word string, env []string) (string, error) {
// this splitting is done **after** the env var substitutions are done. // this splitting is done **after** the env var substitutions are done.
// Note, each one is trimmed to remove leading and trailing spaces (unless // Note, each one is trimmed to remove leading and trailing spaces (unless
// they are quoted", but ProcessWord retains spaces between words. // they are quoted", but ProcessWord retains spaces between words.
func (s *Lex) ProcessWords(word string, env []string) ([]string, error) { func (s *Lex) ProcessWords(word string, env EnvGetter) ([]string, error) {
_, words, err := s.process(word, BuildEnvs(env)) result, err := s.process(word, env, false)
return words, err return result.Words, err
} }
// ProcessWordWithMap will use the 'env' list of environment variables, type ProcessWordResult struct {
// and replace any env var references in 'word'. Result string
func (s *Lex) ProcessWordWithMap(word string, env map[string]string) (string, error) { Words []string
word, _, err := s.process(word, env) Matched map[string]struct{}
return word, err Unmatched map[string]struct{}
} }
// ProcessWordWithMatches will use the 'env' list of environment variables, // ProcessWordWithMatches will use the 'env' list of environment variables,
// replace any env var references in 'word' and return the env that were used. // replace any env var references in 'word' and return the env that were used.
func (s *Lex) ProcessWordWithMatches(word string, env map[string]string) (string, map[string]struct{}, error) { func (s *Lex) ProcessWordWithMatches(word string, env EnvGetter) (ProcessWordResult, error) {
sw := s.init(word, env) return s.process(word, env, true)
word, _, err := sw.process(word)
return word, sw.matches, err
} }
func (s *Lex) ProcessWordsWithMap(word string, env map[string]string) ([]string, error) { func (s *Lex) initWord(word string, env EnvGetter, capture bool) *shellWord {
_, words, err := s.process(word, env) sw := &s.shellWord
return words, err sw.Lex = s
} sw.envs = env
sw.capture = capture
func (s *Lex) init(word string, env map[string]string) *shellWord { sw.rawEscapes = s.RawEscapes
sw := &shellWord{ if capture {
envs: env, sw.matches = nil
escapeToken: s.escapeToken, sw.nonmatches = nil
skipUnsetEnv: s.SkipUnsetEnv,
skipProcessQuotes: s.SkipProcessQuotes,
rawQuotes: s.RawQuotes,
rawEscapes: s.RawEscapes,
matches: make(map[string]struct{}),
} }
sw.scanner.Init(strings.NewReader(word)) sw.scanner.Init(strings.NewReader(word))
return sw return sw
} }
func (s *Lex) process(word string, env map[string]string) (string, []string, error) { func (s *Lex) process(word string, env EnvGetter, capture bool) (ProcessWordResult, error) {
sw := s.init(word, env) sw := s.initWord(word, env, capture)
return sw.process(word) word, words, err := sw.process(word)
return ProcessWordResult{
Result: word,
Words: words,
Matched: sw.matches,
Unmatched: sw.nonmatches,
}, err
} }
type shellWord struct { type shellWord struct {
*Lex
wordsBuffer strings.Builder
scanner scanner.Scanner scanner scanner.Scanner
envs map[string]string envs EnvGetter
escapeToken rune
rawQuotes bool
rawEscapes bool rawEscapes bool
skipUnsetEnv bool capture bool // capture matches and nonmatches
skipProcessQuotes bool
matches map[string]struct{} matches map[string]struct{}
nonmatches map[string]struct{}
} }
func (sw *shellWord) process(source string) (string, []string, error) { func (sw *shellWord) process(source string) (string, []string, error) {
@@ -109,16 +120,16 @@ func (sw *shellWord) process(source string) (string, []string, error) {
} }
type wordsStruct struct { type wordsStruct struct {
word string buf *strings.Builder
words []string words []string
inWord bool inWord bool
} }
func (w *wordsStruct) addChar(ch rune) { func (w *wordsStruct) addChar(ch rune) {
if unicode.IsSpace(ch) && w.inWord { if unicode.IsSpace(ch) && w.inWord {
if len(w.word) != 0 { if w.buf.Len() != 0 {
w.words = append(w.words, w.word) w.words = append(w.words, w.buf.String())
w.word = "" w.buf.Reset()
w.inWord = false w.inWord = false
} }
} else if !unicode.IsSpace(ch) { } else if !unicode.IsSpace(ch) {
@@ -127,7 +138,7 @@ func (w *wordsStruct) addChar(ch rune) {
} }
func (w *wordsStruct) addRawChar(ch rune) { func (w *wordsStruct) addRawChar(ch rune) {
w.word += string(ch) w.buf.WriteRune(ch)
w.inWord = true w.inWord = true
} }
@@ -138,16 +149,16 @@ func (w *wordsStruct) addString(str string) {
} }
func (w *wordsStruct) addRawString(str string) { func (w *wordsStruct) addRawString(str string) {
w.word += str w.buf.WriteString(str)
w.inWord = true w.inWord = true
} }
func (w *wordsStruct) getWords() []string { func (w *wordsStruct) getWords() []string {
if len(w.word) > 0 { if w.buf.Len() > 0 {
w.words = append(w.words, w.word) w.words = append(w.words, w.buf.String())
// Just in case we're called again by mistake // Just in case we're called again by mistake
w.word = "" w.buf.Reset()
w.inWord = false w.inWord = false
} }
return w.words return w.words
@@ -156,13 +167,18 @@ func (w *wordsStruct) getWords() []string {
// Process the word, starting at 'pos', and stop when we get to the // Process the word, starting at 'pos', and stop when we get to the
// end of the word or the 'stopChar' character // end of the word or the 'stopChar' character
func (sw *shellWord) processStopOn(stopChar rune, rawEscapes bool) (string, []string, error) { func (sw *shellWord) processStopOn(stopChar rune, rawEscapes bool) (string, []string, error) {
var result bytes.Buffer // result buffer can't be currently shared for shellWord as it is called internally
// by processDollar
var result strings.Builder
sw.wordsBuffer.Reset()
var words wordsStruct var words wordsStruct
words.buf = &sw.wordsBuffer
// no need to initialize all the time
var charFuncMapping = map[rune]func() (string, error){ var charFuncMapping = map[rune]func() (string, error){
'$': sw.processDollar, '$': sw.processDollar,
} }
if !sw.skipProcessQuotes { if !sw.SkipProcessQuotes {
charFuncMapping['\''] = sw.processSingleQuote charFuncMapping['\''] = sw.processSingleQuote
charFuncMapping['"'] = sw.processDoubleQuote charFuncMapping['"'] = sw.processDoubleQuote
} }
@@ -239,7 +255,7 @@ func (sw *shellWord) processSingleQuote() (string, error) {
var result bytes.Buffer var result bytes.Buffer
ch := sw.scanner.Next() ch := sw.scanner.Next()
if sw.rawQuotes { if sw.RawQuotes {
result.WriteRune(ch) result.WriteRune(ch)
} }
@@ -249,7 +265,7 @@ func (sw *shellWord) processSingleQuote() (string, error) {
case scanner.EOF: case scanner.EOF:
return "", errors.New("unexpected end of statement while looking for matching single-quote") return "", errors.New("unexpected end of statement while looking for matching single-quote")
case '\'': case '\'':
if sw.rawQuotes { if sw.RawQuotes {
result.WriteRune(ch) result.WriteRune(ch)
} }
return result.String(), nil return result.String(), nil
@@ -274,7 +290,7 @@ func (sw *shellWord) processDoubleQuote() (string, error) {
var result bytes.Buffer var result bytes.Buffer
ch := sw.scanner.Next() ch := sw.scanner.Next()
if sw.rawQuotes { if sw.RawQuotes {
result.WriteRune(ch) result.WriteRune(ch)
} }
@@ -284,7 +300,7 @@ func (sw *shellWord) processDoubleQuote() (string, error) {
return "", errors.New("unexpected end of statement while looking for matching double-quote") return "", errors.New("unexpected end of statement while looking for matching double-quote")
case '"': case '"':
ch := sw.scanner.Next() ch := sw.scanner.Next()
if sw.rawQuotes { if sw.RawQuotes {
result.WriteRune(ch) result.WriteRune(ch)
} }
return result.String(), nil return result.String(), nil
@@ -328,7 +344,7 @@ func (sw *shellWord) processDollar() (string, error) {
return "$", nil return "$", nil
} }
value, found := sw.getEnv(name) value, found := sw.getEnv(name)
if !found && sw.skipUnsetEnv { if !found && sw.SkipUnsetEnv {
return "$" + name, nil return "$" + name, nil
} }
return value, nil return value, nil
@@ -351,7 +367,7 @@ func (sw *shellWord) processDollar() (string, error) {
case '}': case '}':
// Normal ${xx} case // Normal ${xx} case
value, set := sw.getEnv(name) value, set := sw.getEnv(name)
if !set && sw.skipUnsetEnv { if !set && sw.SkipUnsetEnv {
return fmt.Sprintf("${%s}", name), nil return fmt.Sprintf("${%s}", name), nil
} }
return value, nil return value, nil
@@ -362,6 +378,9 @@ func (sw *shellWord) processDollar() (string, error) {
fallthrough fallthrough
case '+', '-', '?', '#', '%': case '+', '-', '?', '#', '%':
rawEscapes := ch == '#' || ch == '%' rawEscapes := ch == '#' || ch == '%'
if nullIsUnset && rawEscapes {
return "", errors.Errorf("unsupported modifier (%s) in substitution", chs)
}
word, _, err := sw.processStopOn('}', rawEscapes) word, _, err := sw.processStopOn('}', rawEscapes)
if err != nil { if err != nil {
if sw.scanner.Peek() == scanner.EOF { if sw.scanner.Peek() == scanner.EOF {
@@ -373,7 +392,7 @@ func (sw *shellWord) processDollar() (string, error) {
// Grab the current value of the variable in question so we // Grab the current value of the variable in question so we
// can use it to determine what to do based on the modifier // can use it to determine what to do based on the modifier
value, set := sw.getEnv(name) value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set { if sw.SkipUnsetEnv && !set {
return fmt.Sprintf("${%s%s%s}", name, chs, word), nil return fmt.Sprintf("${%s%s%s}", name, chs, word), nil
} }
@@ -407,7 +426,8 @@ func (sw *shellWord) processDollar() (string, error) {
case '%', '#': case '%', '#':
// %/# matches the shortest pattern expansion, %%/## the longest // %/# matches the shortest pattern expansion, %%/## the longest
greedy := false greedy := false
if word[0] == byte(ch) {
if len(word) > 0 && word[0] == byte(ch) {
greedy = true greedy = true
word = word[1:] word = word[1:]
} }
@@ -442,7 +462,7 @@ func (sw *shellWord) processDollar() (string, error) {
} }
value, set := sw.getEnv(name) value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set { if sw.SkipUnsetEnv && !set {
return fmt.Sprintf("${%s/%s/%s}", name, pattern, replacement), nil return fmt.Sprintf("${%s/%s/%s}", name, pattern, replacement), nil
} }
@@ -505,33 +525,51 @@ func isSpecialParam(char rune) bool {
} }
func (sw *shellWord) getEnv(name string) (string, bool) { func (sw *shellWord) getEnv(name string) (string, bool) {
for key, value := range sw.envs { v, ok := sw.envs.Get(name)
if EqualEnvKeys(name, key) { if ok {
sw.matches[name] = struct{}{} if sw.capture {
return value, true if sw.matches == nil {
sw.matches = make(map[string]struct{})
} }
sw.matches[name] = struct{}{}
}
return v, true
}
if sw.capture {
if sw.nonmatches == nil {
sw.nonmatches = make(map[string]struct{})
}
sw.nonmatches[name] = struct{}{}
} }
return "", false return "", false
} }
func BuildEnvs(env []string) map[string]string { func EnvsFromSlice(env []string) EnvGetter {
envs := map[string]string{} envs := map[string]string{}
keys := make([]string, 0, len(env))
for _, e := range env { for _, e := range env {
i := strings.Index(e, "=") k, v, _ := strings.Cut(e, "=")
keys = append(keys, k)
if i < 0 { envs[NormalizeEnvKey(k)] = v
envs[e] = ""
} else {
k := e[:i]
v := e[i+1:]
// overwrite value if key already exists
envs[k] = v
} }
return &envGetter{env: envs, keys: keys}
} }
return envs type envGetter struct {
env map[string]string
keys []string
}
var _ EnvGetter = &envGetter{}
func (e *envGetter) Get(key string) (string, bool) {
key = NormalizeEnvKey(key)
v, ok := e.env[key]
return v, ok
}
func (e *envGetter) Keys() []string {
return e.keys
} }
// convertShellPatternToRegex converts a shell-like wildcard pattern // convertShellPatternToRegex converts a shell-like wildcard pattern
@@ -623,11 +661,7 @@ func reversePattern(pattern string) string {
func reverseString(str string) string { func reverseString(str string) string {
out := []rune(str) out := []rune(str)
outIdx := len(out) - 1 slices.Reverse(out)
for i := 0; i < outIdx; i++ {
out[i], out[outIdx] = out[outIdx], out[i]
outIdx--
}
return string(out) return string(out)
} }

67
vendor/github.com/moby/buildkit/util/stack/compress.go generated vendored Normal file
View File

@@ -0,0 +1,67 @@
package stack
import (
"slices"
)
func compressStacks(st []*Stack) []*Stack {
if len(st) == 0 {
return nil
}
slices.SortFunc(st, func(a, b *Stack) int {
return len(b.Frames) - len(a.Frames)
})
out := []*Stack{st[0]}
loop0:
for _, st := range st[1:] {
maxIdx := -1
for _, prev := range out {
idx := subFrames(st.Frames, prev.Frames)
if idx == -1 {
continue
}
// full match, potentially skip all
if idx == len(st.Frames)-1 {
if st.Pid == prev.Pid && st.Version == prev.Version && slices.Compare(st.Cmdline, st.Cmdline) == 0 {
continue loop0
}
}
if idx > maxIdx {
maxIdx = idx
}
}
if maxIdx > 0 {
st.Frames = st.Frames[:len(st.Frames)-maxIdx]
}
out = append(out, st)
}
return out
}
func subFrames(a, b []*Frame) int {
idx := -1
i := len(a) - 1
j := len(b) - 1
for i >= 0 {
if j < 0 {
break
}
if a[i].Equal(b[j]) {
idx++
i--
j--
} else {
break
}
}
return idx
}
func (a *Frame) Equal(b *Frame) bool {
return a.File == b.File && a.Line == b.Line && a.Name == b.Name
}

View File

@@ -1,3 +0,0 @@
package stack
//go:generate protoc -I=. -I=../../vendor/ --go_out=. --go_opt=paths=source_relative --go_opt=Mstack.proto=/util/stack stack.proto

View File

@@ -44,24 +44,29 @@ func Helper() {
} }
func Traces(err error) []*Stack { func Traces(err error) []*Stack {
return compressStacks(traces(err))
}
func traces(err error) []*Stack {
var st []*Stack var st []*Stack
wrapped, ok := err.(interface { switch e := err.(type) {
Unwrap() error case interface{ Unwrap() error }:
}) st = Traces(e.Unwrap())
if ok { case interface{ Unwrap() []error }:
st = Traces(wrapped.Unwrap()) for _, ue := range e.Unwrap() {
st = Traces(ue)
// Only take first stack
if len(st) > 0 {
break
}
}
} }
if ste, ok := err.(interface { switch ste := err.(type) {
StackTrace() errors.StackTrace case interface{ StackTrace() errors.StackTrace }:
}); ok {
st = append(st, convertStack(ste.StackTrace())) st = append(st, convertStack(ste.StackTrace()))
} case interface{ StackTrace() *Stack }:
if ste, ok := err.(interface {
StackTrace() *Stack
}); ok {
st = append(st, ste.StackTrace()) st = append(st, ste.StackTrace())
} }

View File

@@ -1,8 +1,8 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.31.0 // protoc-gen-go v1.35.1
// protoc v3.11.4 // protoc v3.11.4
// source: stack.proto // source: github.com/moby/buildkit/util/stack/stack.proto
package stack package stack
@@ -34,12 +34,10 @@ type Stack struct {
func (x *Stack) Reset() { func (x *Stack) Reset() {
*x = Stack{} *x = Stack{}
if protoimpl.UnsafeEnabled { mi := &file_github_com_moby_buildkit_util_stack_stack_proto_msgTypes[0]
mi := &file_stack_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
}
func (x *Stack) String() string { func (x *Stack) String() string {
return protoimpl.X.MessageStringOf(x) return protoimpl.X.MessageStringOf(x)
@@ -48,8 +46,8 @@ func (x *Stack) String() string {
func (*Stack) ProtoMessage() {} func (*Stack) ProtoMessage() {}
func (x *Stack) ProtoReflect() protoreflect.Message { func (x *Stack) ProtoReflect() protoreflect.Message {
mi := &file_stack_proto_msgTypes[0] mi := &file_github_com_moby_buildkit_util_stack_stack_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
@@ -61,7 +59,7 @@ func (x *Stack) ProtoReflect() protoreflect.Message {
// Deprecated: Use Stack.ProtoReflect.Descriptor instead. // Deprecated: Use Stack.ProtoReflect.Descriptor instead.
func (*Stack) Descriptor() ([]byte, []int) { func (*Stack) Descriptor() ([]byte, []int) {
return file_stack_proto_rawDescGZIP(), []int{0} return file_github_com_moby_buildkit_util_stack_stack_proto_rawDescGZIP(), []int{0}
} }
func (x *Stack) GetFrames() []*Frame { func (x *Stack) GetFrames() []*Frame {
@@ -111,12 +109,10 @@ type Frame struct {
func (x *Frame) Reset() { func (x *Frame) Reset() {
*x = Frame{} *x = Frame{}
if protoimpl.UnsafeEnabled { mi := &file_github_com_moby_buildkit_util_stack_stack_proto_msgTypes[1]
mi := &file_stack_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
}
func (x *Frame) String() string { func (x *Frame) String() string {
return protoimpl.X.MessageStringOf(x) return protoimpl.X.MessageStringOf(x)
@@ -125,8 +121,8 @@ func (x *Frame) String() string {
func (*Frame) ProtoMessage() {} func (*Frame) ProtoMessage() {}
func (x *Frame) ProtoReflect() protoreflect.Message { func (x *Frame) ProtoReflect() protoreflect.Message {
mi := &file_stack_proto_msgTypes[1] mi := &file_github_com_moby_buildkit_util_stack_stack_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
@@ -138,7 +134,7 @@ func (x *Frame) ProtoReflect() protoreflect.Message {
// Deprecated: Use Frame.ProtoReflect.Descriptor instead. // Deprecated: Use Frame.ProtoReflect.Descriptor instead.
func (*Frame) Descriptor() ([]byte, []int) { func (*Frame) Descriptor() ([]byte, []int) {
return file_stack_proto_rawDescGZIP(), []int{1} return file_github_com_moby_buildkit_util_stack_stack_proto_rawDescGZIP(), []int{1}
} }
func (x *Frame) GetName() string { func (x *Frame) GetName() string {
@@ -162,45 +158,49 @@ func (x *Frame) GetLine() int32 {
return 0 return 0
} }
var File_stack_proto protoreflect.FileDescriptor var File_github_com_moby_buildkit_util_stack_stack_proto protoreflect.FileDescriptor
var file_stack_proto_rawDesc = []byte{ var file_github_com_moby_buildkit_util_stack_stack_proto_rawDesc = []byte{
0x0a, 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, 0x73, 0x0a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6d, 0x6f, 0x62,
0x74, 0x61, 0x63, 0x6b, 0x22, 0x8f, 0x01, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x63, 0x6b, 0x12, 0x24, 0x79, 0x2f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x2f, 0x75, 0x74, 0x69, 0x6c, 0x2f,
0x0a, 0x06, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0c, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2f, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x2e, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x52, 0x06, 0x66, 0x72, 0x6f, 0x12, 0x05, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x22, 0x8f, 0x01, 0x0a, 0x05, 0x53, 0x74, 0x61,
0x61, 0x6d, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x63, 0x6b, 0x12, 0x24, 0x0a, 0x06, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03,
0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x10, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x2e, 0x46, 0x72, 0x61, 0x6d, 0x65,
0x0a, 0x03, 0x70, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x70, 0x69, 0x64, 0x52, 0x06, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x6c,
0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x69, 0x6e, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x6c, 0x69,
0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x6e, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x70, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52,
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x03, 0x70, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18,
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x43, 0x0a, 0x05, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x12, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a,
0x12, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x4e, 0x0a, 0x08, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09,
0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x52, 0x08, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x43, 0x0a, 0x05, 0x46, 0x72,
0x09, 0x52, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x18, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x09, 0x52, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x18,
0x74, 0x6f, 0x33, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x4c,
0x69, 0x6e, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x42,
0x25, 0x5a, 0x23, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6d, 0x6f,
0x62, 0x79, 0x2f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x2f, 0x75, 0x74, 0x69, 0x6c,
0x2f, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
} }
var ( var (
file_stack_proto_rawDescOnce sync.Once file_github_com_moby_buildkit_util_stack_stack_proto_rawDescOnce sync.Once
file_stack_proto_rawDescData = file_stack_proto_rawDesc file_github_com_moby_buildkit_util_stack_stack_proto_rawDescData = file_github_com_moby_buildkit_util_stack_stack_proto_rawDesc
) )
func file_stack_proto_rawDescGZIP() []byte { func file_github_com_moby_buildkit_util_stack_stack_proto_rawDescGZIP() []byte {
file_stack_proto_rawDescOnce.Do(func() { file_github_com_moby_buildkit_util_stack_stack_proto_rawDescOnce.Do(func() {
file_stack_proto_rawDescData = protoimpl.X.CompressGZIP(file_stack_proto_rawDescData) file_github_com_moby_buildkit_util_stack_stack_proto_rawDescData = protoimpl.X.CompressGZIP(file_github_com_moby_buildkit_util_stack_stack_proto_rawDescData)
}) })
return file_stack_proto_rawDescData return file_github_com_moby_buildkit_util_stack_stack_proto_rawDescData
} }
var file_stack_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_github_com_moby_buildkit_util_stack_stack_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_stack_proto_goTypes = []interface{}{ var file_github_com_moby_buildkit_util_stack_stack_proto_goTypes = []any{
(*Stack)(nil), // 0: stack.Stack (*Stack)(nil), // 0: stack.Stack
(*Frame)(nil), // 1: stack.Frame (*Frame)(nil), // 1: stack.Frame
} }
var file_stack_proto_depIdxs = []int32{ var file_github_com_moby_buildkit_util_stack_stack_proto_depIdxs = []int32{
1, // 0: stack.Stack.frames:type_name -> stack.Frame 1, // 0: stack.Stack.frames:type_name -> stack.Frame
1, // [1:1] is the sub-list for method output_type 1, // [1:1] is the sub-list for method output_type
1, // [1:1] is the sub-list for method input_type 1, // [1:1] is the sub-list for method input_type
@@ -209,53 +209,27 @@ var file_stack_proto_depIdxs = []int32{
0, // [0:1] is the sub-list for field type_name 0, // [0:1] is the sub-list for field type_name
} }
func init() { file_stack_proto_init() } func init() { file_github_com_moby_buildkit_util_stack_stack_proto_init() }
func file_stack_proto_init() { func file_github_com_moby_buildkit_util_stack_stack_proto_init() {
if File_stack_proto != nil { if File_github_com_moby_buildkit_util_stack_stack_proto != nil {
return return
} }
if !protoimpl.UnsafeEnabled {
file_stack_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Stack); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_stack_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Frame); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{} type x struct{}
out := protoimpl.TypeBuilder{ out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_stack_proto_rawDesc, RawDescriptor: file_github_com_moby_buildkit_util_stack_stack_proto_rawDesc,
NumEnums: 0, NumEnums: 0,
NumMessages: 2, NumMessages: 2,
NumExtensions: 0, NumExtensions: 0,
NumServices: 0, NumServices: 0,
}, },
GoTypes: file_stack_proto_goTypes, GoTypes: file_github_com_moby_buildkit_util_stack_stack_proto_goTypes,
DependencyIndexes: file_stack_proto_depIdxs, DependencyIndexes: file_github_com_moby_buildkit_util_stack_stack_proto_depIdxs,
MessageInfos: file_stack_proto_msgTypes, MessageInfos: file_github_com_moby_buildkit_util_stack_stack_proto_msgTypes,
}.Build() }.Build()
File_stack_proto = out.File File_github_com_moby_buildkit_util_stack_stack_proto = out.File
file_stack_proto_rawDesc = nil file_github_com_moby_buildkit_util_stack_stack_proto_rawDesc = nil
file_stack_proto_goTypes = nil file_github_com_moby_buildkit_util_stack_stack_proto_goTypes = nil
file_stack_proto_depIdxs = nil file_github_com_moby_buildkit_util_stack_stack_proto_depIdxs = nil
} }

View File

@@ -2,6 +2,8 @@ syntax = "proto3";
package stack; package stack;
option go_package = "github.com/moby/buildkit/util/stack";
message Stack { message Stack {
repeated Frame frames = 1; repeated Frame frames = 1;
repeated string cmdline = 2; repeated string cmdline = 2;

View File

@@ -0,0 +1,660 @@
// Code generated by protoc-gen-go-vtproto. DO NOT EDIT.
// protoc-gen-go-vtproto version: v0.6.1-0.20240319094008-0393e58bdf10
// source: github.com/moby/buildkit/util/stack/stack.proto
package stack
import (
fmt "fmt"
protohelpers "github.com/planetscale/vtprotobuf/protohelpers"
proto "google.golang.org/protobuf/proto"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
io "io"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
func (m *Stack) CloneVT() *Stack {
if m == nil {
return (*Stack)(nil)
}
r := new(Stack)
r.Pid = m.Pid
r.Version = m.Version
r.Revision = m.Revision
if rhs := m.Frames; rhs != nil {
tmpContainer := make([]*Frame, len(rhs))
for k, v := range rhs {
tmpContainer[k] = v.CloneVT()
}
r.Frames = tmpContainer
}
if rhs := m.Cmdline; rhs != nil {
tmpContainer := make([]string, len(rhs))
copy(tmpContainer, rhs)
r.Cmdline = tmpContainer
}
if len(m.unknownFields) > 0 {
r.unknownFields = make([]byte, len(m.unknownFields))
copy(r.unknownFields, m.unknownFields)
}
return r
}
func (m *Stack) CloneMessageVT() proto.Message {
return m.CloneVT()
}
// CloneVT returns a deep copy of the Frame, including its raw
// unknownFields bytes. A nil receiver yields a typed nil pointer.
func (m *Frame) CloneVT() *Frame {
	if m == nil {
		return (*Frame)(nil)
	}
	r := new(Frame)
	r.Name = m.Name
	r.File = m.File
	r.Line = m.Line
	if len(m.unknownFields) > 0 {
		r.unknownFields = make([]byte, len(m.unknownFields))
		copy(r.unknownFields, m.unknownFields)
	}
	return r
}
// CloneMessageVT adapts CloneVT to the generic proto.Message interface.
func (m *Frame) CloneMessageVT() proto.Message {
	return m.CloneVT()
}
// EqualVT reports whether two Stacks are equal: same frames (element by
// element), same cmdline, same scalar fields, and byte-identical
// unknownFields. Two nil receivers compare equal; nil vs non-nil does not.
func (this *Stack) EqualVT(that *Stack) bool {
	if this == that {
		return true
	} else if this == nil || that == nil {
		return false
	}
	if len(this.Frames) != len(that.Frames) {
		return false
	}
	for i, vx := range this.Frames {
		vy := that.Frames[i]
		if p, q := vx, vy; p != q {
			// A nil element is treated as an empty Frame, so nil and the
			// zero-value Frame compare equal.
			if p == nil {
				p = &Frame{}
			}
			if q == nil {
				q = &Frame{}
			}
			if !p.EqualVT(q) {
				return false
			}
		}
	}
	if len(this.Cmdline) != len(that.Cmdline) {
		return false
	}
	for i, vx := range this.Cmdline {
		vy := that.Cmdline[i]
		if vx != vy {
			return false
		}
	}
	if this.Pid != that.Pid {
		return false
	}
	if this.Version != that.Version {
		return false
	}
	if this.Revision != that.Revision {
		return false
	}
	return string(this.unknownFields) == string(that.unknownFields)
}
// EqualMessageVT adapts EqualVT to the generic proto.Message interface;
// a message of any other concrete type is never equal.
func (this *Stack) EqualMessageVT(thatMsg proto.Message) bool {
	that, ok := thatMsg.(*Stack)
	if !ok {
		return false
	}
	return this.EqualVT(that)
}
// EqualVT reports whether two Frames have identical Name, File, Line,
// and raw unknownFields bytes. Two nil receivers compare equal.
func (this *Frame) EqualVT(that *Frame) bool {
	if this == that {
		return true
	} else if this == nil || that == nil {
		return false
	}
	if this.Name != that.Name {
		return false
	}
	if this.File != that.File {
		return false
	}
	if this.Line != that.Line {
		return false
	}
	return string(this.unknownFields) == string(that.unknownFields)
}
// EqualMessageVT adapts EqualVT to the generic proto.Message interface;
// a message of any other concrete type is never equal.
func (this *Frame) EqualMessageVT(thatMsg proto.Message) bool {
	that, ok := thatMsg.(*Frame)
	if !ok {
		return false
	}
	return this.EqualVT(that)
}
// MarshalVT serializes the Stack to protobuf wire format in a freshly
// allocated buffer sized by SizeVT. A nil receiver marshals to nil.
func (m *Stack) MarshalVT() (dAtA []byte, err error) {
	if m == nil {
		return nil, nil
	}
	size := m.SizeVT()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBufferVT(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalToVT serializes the Stack into the caller-supplied buffer,
// which must hold at least SizeVT bytes; it returns the bytes written.
func (m *Stack) MarshalToVT(dAtA []byte) (int, error) {
	size := m.SizeVT()
	return m.MarshalToSizedBufferVT(dAtA[:size])
}
// MarshalToSizedBufferVT writes the Stack backwards into the tail of
// dAtA (highest field number first, each field encoded right-to-left)
// and returns the total encoded size.
func (m *Stack) MarshalToSizedBufferVT(dAtA []byte) (int, error) {
	if m == nil {
		return 0, nil
	}
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.unknownFields != nil {
		i -= len(m.unknownFields)
		copy(dAtA[i:], m.unknownFields)
	}
	if len(m.Revision) > 0 {
		i -= len(m.Revision)
		copy(dAtA[i:], m.Revision)
		i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Revision)))
		i--
		dAtA[i] = 0x2a // tag: field 5, wire type 2 (length-delimited)
	}
	if len(m.Version) > 0 {
		i -= len(m.Version)
		copy(dAtA[i:], m.Version)
		i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Version)))
		i--
		dAtA[i] = 0x22 // tag: field 4, wire type 2 (length-delimited)
	}
	if m.Pid != 0 {
		i = protohelpers.EncodeVarint(dAtA, i, uint64(m.Pid))
		i--
		dAtA[i] = 0x18 // tag: field 3, wire type 0 (varint)
	}
	if len(m.Cmdline) > 0 {
		for iNdEx := len(m.Cmdline) - 1; iNdEx >= 0; iNdEx-- {
			i -= len(m.Cmdline[iNdEx])
			copy(dAtA[i:], m.Cmdline[iNdEx])
			i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Cmdline[iNdEx])))
			i--
			dAtA[i] = 0x12 // tag: field 2, wire type 2 (length-delimited)
		}
	}
	if len(m.Frames) > 0 {
		for iNdEx := len(m.Frames) - 1; iNdEx >= 0; iNdEx-- {
			size, err := m.Frames[iNdEx].MarshalToSizedBufferVT(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = protohelpers.EncodeVarint(dAtA, i, uint64(size))
			i--
			dAtA[i] = 0xa // tag: field 1, wire type 2 (embedded message)
		}
	}
	return len(dAtA) - i, nil
}
// MarshalVT serializes the Frame to protobuf wire format in a freshly
// allocated buffer sized by SizeVT. A nil receiver marshals to nil.
func (m *Frame) MarshalVT() (dAtA []byte, err error) {
	if m == nil {
		return nil, nil
	}
	size := m.SizeVT()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBufferVT(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalToVT serializes the Frame into the caller-supplied buffer,
// which must hold at least SizeVT bytes; it returns the bytes written.
func (m *Frame) MarshalToVT(dAtA []byte) (int, error) {
	size := m.SizeVT()
	return m.MarshalToSizedBufferVT(dAtA[:size])
}
// MarshalToSizedBufferVT writes the Frame backwards into the tail of
// dAtA (highest field number first) and returns the total encoded size.
func (m *Frame) MarshalToSizedBufferVT(dAtA []byte) (int, error) {
	if m == nil {
		return 0, nil
	}
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.unknownFields != nil {
		i -= len(m.unknownFields)
		copy(dAtA[i:], m.unknownFields)
	}
	if m.Line != 0 {
		i = protohelpers.EncodeVarint(dAtA, i, uint64(m.Line))
		i--
		dAtA[i] = 0x18 // tag: field 3, wire type 0 (varint)
	}
	if len(m.File) > 0 {
		i -= len(m.File)
		copy(dAtA[i:], m.File)
		i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.File)))
		i--
		dAtA[i] = 0x12 // tag: field 2, wire type 2 (length-delimited)
	}
	if len(m.Name) > 0 {
		i -= len(m.Name)
		copy(dAtA[i:], m.Name)
		i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Name)))
		i--
		dAtA[i] = 0xa // tag: field 1, wire type 2 (length-delimited)
	}
	return len(dAtA) - i, nil
}
// SizeVT returns the exact protobuf wire-format size of the Stack in
// bytes (1 byte per field tag plus varint-length prefixes), including
// any retained unknownFields.
func (m *Stack) SizeVT() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if len(m.Frames) > 0 {
		for _, e := range m.Frames {
			l = e.SizeVT()
			n += 1 + l + protohelpers.SizeOfVarint(uint64(l))
		}
	}
	if len(m.Cmdline) > 0 {
		for _, s := range m.Cmdline {
			l = len(s)
			n += 1 + l + protohelpers.SizeOfVarint(uint64(l))
		}
	}
	if m.Pid != 0 {
		n += 1 + protohelpers.SizeOfVarint(uint64(m.Pid))
	}
	l = len(m.Version)
	if l > 0 {
		n += 1 + l + protohelpers.SizeOfVarint(uint64(l))
	}
	l = len(m.Revision)
	if l > 0 {
		n += 1 + l + protohelpers.SizeOfVarint(uint64(l))
	}
	n += len(m.unknownFields)
	return n
}
// SizeVT returns the exact protobuf wire-format size of the Frame in
// bytes, including any retained unknownFields.
func (m *Frame) SizeVT() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + protohelpers.SizeOfVarint(uint64(l))
	}
	l = len(m.File)
	if l > 0 {
		n += 1 + l + protohelpers.SizeOfVarint(uint64(l))
	}
	if m.Line != 0 {
		n += 1 + protohelpers.SizeOfVarint(uint64(m.Line))
	}
	n += len(m.unknownFields)
	return n
}
// UnmarshalVT decodes protobuf wire-format data into the Stack.
// Repeated fields (Frames, Cmdline) are appended to, scalar fields are
// overwritten, and unrecognized fields are preserved verbatim in
// unknownFields.
func (m *Stack) UnmarshalVT(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the tag varint (field number << 3 | wire type).
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return protohelpers.ErrIntOverflow
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Stack: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Stack: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Frames: repeated embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Frames", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return protohelpers.ErrInvalidLength
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return protohelpers.ErrInvalidLength
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Frames = append(m.Frames, &Frame{})
			if err := m.Frames[len(m.Frames)-1].UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Cmdline: repeated string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Cmdline", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return protohelpers.ErrInvalidLength
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return protohelpers.ErrInvalidLength
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Cmdline = append(m.Cmdline, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 3:
			// Pid: varint-encoded int32.
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Pid", wireType)
			}
			m.Pid = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Pid |= int32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 4:
			// Version: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Version", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return protohelpers.ErrInvalidLength
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return protohelpers.ErrInvalidLength
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Version = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 5:
			// Revision: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Revision", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return protohelpers.ErrInvalidLength
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return protohelpers.ErrInvalidLength
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Revision = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag, skip the whole record, and
			// retain the raw bytes so re-marshaling round-trips them.
			iNdEx = preIndex
			skippy, err := protohelpers.Skip(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return protohelpers.ErrInvalidLength
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// UnmarshalVT decodes protobuf wire-format data into the Frame.
// Scalar fields are overwritten; unrecognized fields are preserved
// verbatim in unknownFields.
func (m *Frame) UnmarshalVT(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the tag varint (field number << 3 | wire type).
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return protohelpers.ErrIntOverflow
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Frame: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Frame: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Name: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return protohelpers.ErrInvalidLength
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return protohelpers.ErrInvalidLength
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// File: string.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field File", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return protohelpers.ErrInvalidLength
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return protohelpers.ErrInvalidLength
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.File = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3:
			// Line: varint-encoded int32.
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Line", wireType)
			}
			m.Line = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protohelpers.ErrIntOverflow
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Line |= int32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		default:
			// Unknown field: rewind to the tag, skip the whole record, and
			// retain the raw bytes so re-marshaling round-trips them.
			iNdEx = preIndex
			skippy, err := protohelpers.Skip(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return protohelpers.ErrInvalidLength
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

View File

@@ -6,11 +6,7 @@ import (
"github.com/agext/levenshtein" "github.com/agext/levenshtein"
) )
// WrapError wraps error with a suggestion for fixing it func Search(val string, options []string, caseSensitive bool) (string, bool) {
func WrapError(err error, val string, options []string, caseSensitive bool) error {
if err == nil {
return nil
}
orig := val orig := val
if !caseSensitive { if !caseSensitive {
val = strings.ToLower(val) val = strings.ToLower(val)
@@ -23,7 +19,7 @@ func WrapError(err error, val string, options []string, caseSensitive bool) erro
} }
if val == opt { if val == opt {
// exact match means error was unrelated to the value // exact match means error was unrelated to the value
return err return "", false
} }
dist := levenshtein.Distance(val, opt, nil) dist := levenshtein.Distance(val, opt, nil)
if dist < mindist { if dist < mindist {
@@ -35,12 +31,25 @@ func WrapError(err error, val string, options []string, caseSensitive bool) erro
mindist = dist mindist = dist
} }
} }
return match, match != ""
}
if match == "" { // WrapError wraps error with a suggestion for fixing it
func WrapError(err error, val string, options []string, caseSensitive bool) error {
_, err = WrapErrorMaybe(err, val, options, caseSensitive)
return err return err
} }
return &suggestError{ func WrapErrorMaybe(err error, val string, options []string, caseSensitive bool) (bool, error) {
if err == nil {
return false, nil
}
match, ok := Search(val, options, caseSensitive)
if match == "" || !ok {
return false, err
}
return true, &suggestError{
err: err, err: err,
match: match, match: match,
} }

29
vendor/github.com/planetscale/vtprotobuf/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,29 @@
Copyright (c) 2021, PlanetScale Inc. All rights reserved.
Copyright (c) 2013, The GoGo Authors. All rights reserved.
Copyright (c) 2018 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,122 @@
// Package protohelpers provides helper functions for encoding and decoding protobuf messages.
// The spec can be found at https://protobuf.dev/programming-guides/encoding/.
package protohelpers
import (
"fmt"
"io"
"math/bits"
)
// Sentinel errors shared by the decoding helpers in this package.
var (
	// ErrInvalidLength is returned when decoding a negative length.
	ErrInvalidLength = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflow is returned when decoding a varint representation of an integer that overflows 64 bits.
	ErrIntOverflow = fmt.Errorf("proto: integer overflow")
	// ErrUnexpectedEndOfGroup is returned when decoding a group end without a corresponding group start.
	ErrUnexpectedEndOfGroup = fmt.Errorf("proto: unexpected end of group")
)
// EncodeVarint writes the varint encoding of v into dAtA so that the
// last encoded byte sits immediately before offset, and returns the
// index of the first encoded byte.
func EncodeVarint(dAtA []byte, offset int, v uint64) int {
	start := offset - SizeOfVarint(v)
	for pos, x := start, v; ; pos, x = pos+1, x>>7 {
		if x < 0x80 {
			// Final byte: high bit clear terminates the varint.
			dAtA[pos] = byte(x)
			return start
		}
		// Continuation byte: low 7 bits of x with the high bit set.
		dAtA[pos] = byte(x) | 0x80
	}
}
// SizeOfVarint returns the number of bytes needed to varint-encode x.
// OR-ing with 1 makes zero occupy one byte like any other small value.
func SizeOfVarint(x uint64) (n int) {
	return 1 + (bits.Len64(x|1)-1)/7
}
// SizeOfZigzag returns the varint-encoded size of x after zigzag
// encoding, which maps signed values to small unsigned ones.
func SizeOfZigzag(x uint64) (n int) {
	zig := (x << 1) ^ uint64(int64(x)>>63)
	return SizeOfVarint(zig)
}
// Skip advances past the first record (tag plus payload) in dAtA and
// returns the offset of the next record. Group wire types (3 = start,
// 4 = end) may nest; depth counts open groups so a complete group is
// consumed as one record.
func Skip(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0
	for iNdEx < l {
		// Decode the tag varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflow
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint payload: scan to the terminating byte (high bit clear).
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflow
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed 64-bit payload.
			iNdEx += 8
		case 2:
			// Length-delimited payload: decode the length, then skip it.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflow
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLength
			}
			iNdEx += length
		case 3:
			// Group start.
			depth++
		case 4:
			// Group end must match a prior group start.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroup
			}
			depth--
		case 5:
			// Fixed 32-bit payload.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		// Guard against offset overflow from hostile lengths.
		if iNdEx < 0 {
			return 0, ErrInvalidLength
		}
		if depth == 0 {
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}

30
vendor/github.com/tonistiigi/go-csvvalue/.golangci.yml generated vendored Normal file
View File

@@ -0,0 +1,30 @@
run:
timeout: 10m
linters:
enable:
- bodyclose
- depguard
- errname
- forbidigo
- gocritic
- gofmt
- goimports
- gosec
- gosimple
- govet
- importas
- ineffassign
- makezero
- misspell
- noctx
- nolintlint
- revive
- staticcheck
- typecheck
- unused
- whitespace
disable-all: true
issues:
exclude-use-default: false

13
vendor/github.com/tonistiigi/go-csvvalue/.yamllint.yml generated vendored Normal file
View File

@@ -0,0 +1,13 @@
ignore: |
/vendor
extends: default
yaml-files:
- '*.yaml'
- '*.yml'
rules:
truthy: disable
line-length: disable
document-start: disable

42
vendor/github.com/tonistiigi/go-csvvalue/Dockerfile generated vendored Normal file
View File

@@ -0,0 +1,42 @@
#syntax=docker/dockerfile:1.8
#check=error=true
ARG GO_VERSION=1.22
ARG XX_VERSION=1.4.0
ARG COVER_FILENAME="cover.out"
ARG BENCH_FILENAME="bench.txt"
FROM --platform=${BUILDPLATFORM} tonistiigi/xx:${XX_VERSION} AS xx
FROM --platform=${BUILDPLATFORM} golang:${GO_VERSION}-alpine AS golang
COPY --link --from=xx / /
WORKDIR /src
ARG TARGETPLATFORM
FROM golang AS build
RUN --mount=target=/root/.cache,type=cache \
--mount=type=bind xx-go build .
FROM golang AS runbench
ARG BENCH_FILENAME
RUN --mount=target=/root/.cache,type=cache \
--mount=type=bind \
xx-go test -v --run skip --bench . | tee /tmp/${BENCH_FILENAME}
FROM scratch AS bench
ARG BENCH_FILENAME
COPY --from=runbench /tmp/${BENCH_FILENAME} /
FROM golang AS runtest
ARG TESTFLAGS="-v"
ARG COVER_FILENAME
RUN --mount=target=/root/.cache,type=cache \
--mount=type=bind \
xx-go test -coverprofile=/tmp/${COVER_FILENAME} $TESTFLAGS .
FROM scratch AS test
ARG COVER_FILENAME
COPY --from=runtest /tmp/${COVER_FILENAME} /
FROM build

22
vendor/github.com/tonistiigi/go-csvvalue/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,22 @@
MIT
Copyright 2024 Tõnis Tiigi <tonistiigi@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

1
vendor/github.com/tonistiigi/go-csvvalue/codecov.yml generated vendored Normal file
View File

@@ -0,0 +1 @@
comment: false

173
vendor/github.com/tonistiigi/go-csvvalue/csvvalue.go generated vendored Normal file
View File

@@ -0,0 +1,173 @@
// Package csvvalue provides an efficient parser for a single line CSV value.
// It is more efficient than the standard library csv package for parsing many
// small values. For multi-line CSV parsing, the standard library is recommended.
package csvvalue
import (
"encoding/csv"
"errors"
"io"
"strings"
"unicode"
"unicode/utf8"
)
// errInvalidDelim mirrors encoding/csv's rejection of unusable delimiters.
var errInvalidDelim = errors.New("csv: invalid field or comment delimiter")

// defaultParser backs the package-level Fields convenience function.
var defaultParser = NewParser()
// Fields parses the line with the default (comma-delimited) parser and
// returns the slice of fields for the record. If dst is nil, a new
// slice is allocated; otherwise dst's storage is reused.
func Fields(inp string, dst []string) ([]string, error) {
	return defaultParser.Fields(inp, dst)
}
// Parser is a CSV parser for a single line value.
type Parser struct {
	// Comma is the field delimiter rune.
	Comma rune
	// LazyQuotes permits bare quotes inside fields and unterminated
	// quoted fields instead of returning a parse error.
	LazyQuotes bool
	// TrimLeadingSpace strips leading Unicode whitespace from each field.
	TrimLeadingSpace bool
}
// NewParser returns a new Parser with default settings: a comma
// delimiter, strict quoting, and no leading-space trimming.
func NewParser() *Parser {
	p := new(Parser)
	p.Comma = ','
	return p
}
// Fields parses the line and returns slice of fields for the record.
// If dst is nil, a new slice is allocated.
// For backward compatibility, a trailing newline is allowed.
func (r *Parser) Fields(line string, dst []string) ([]string, error) {
	if !validDelim(r.Comma) {
		return nil, errInvalidDelim
	}
	if cap(dst) == 0 {
		// imprecise estimate, strings.Count is fast
		dst = make([]string, 0, 1+strings.Count(line, string(r.Comma)))
	} else {
		dst = dst[:0]
	}
	const quoteLen = len(`"`)
	var (
		pos      int
		commaLen = utf8.RuneLen(r.Comma)
		trim     = r.TrimLeadingSpace
	)
	// allow trailing newline for compatibility
	if n := len(line); n > 0 && line[n-1] == '\n' {
		if n > 1 && line[n-2] == '\r' {
			line = line[:n-2]
		} else {
			line = line[:n-1]
		}
	}
	if len(line) == 0 {
		return nil, io.EOF
	}
parseField:
	for {
		if trim {
			i := strings.IndexFunc(line, func(r rune) bool {
				return !unicode.IsSpace(r)
			})
			if i < 0 {
				i = len(line)
			}
			line = line[i:]
			pos += i
		}
		if len(line) == 0 || line[0] != '"' {
			// Non-quoted string field
			i := strings.IndexRune(line, r.Comma)
			var field string
			if i >= 0 {
				field = line[:i]
			} else {
				field = line
			}
			// Check to make sure a quote does not appear in field.
			if !r.LazyQuotes {
				if j := strings.IndexRune(field, '"'); j >= 0 {
					return nil, parseErr(pos+j, csv.ErrBareQuote)
				}
			}
			dst = append(dst, field)
			if i >= 0 {
				line = line[i+commaLen:]
				pos += i + commaLen
				continue
			}
			break
		}
		// Quoted string field
		line = line[quoteLen:]
		pos += quoteLen
		// halfOpen tracks whether the chunk already appended to dst still
		// belongs to the current (not yet terminated) quoted field, so the
		// next chunk is concatenated rather than starting a new field.
		halfOpen := false
		for {
			i := strings.IndexRune(line, '"')
			if i >= 0 {
				// Hit next quote.
				if !halfOpen {
					dst = append(dst, line[:i])
				} else {
					appendToLast(dst, line[:i])
				}
				halfOpen = false
				line = line[i+quoteLen:]
				pos += i + quoteLen
				switch rn := nextRune(line); {
				case rn == '"':
					// `""` sequence (append quote).
					appendToLast(dst, "\"")
					line = line[quoteLen:]
					pos += quoteLen
					halfOpen = true
				case rn == r.Comma:
					// `",` sequence (end of field).
					line = line[commaLen:]
					pos += commaLen
					continue parseField
				case len(line) == 0:
					break parseField
				case r.LazyQuotes:
					// `"` sequence (bare quote).
					appendToLast(dst, "\"")
					halfOpen = true
				default:
					// `"*` sequence (invalid non-escaped quote).
					return nil, parseErr(pos-quoteLen, csv.ErrQuote)
				}
			} else {
				if !r.LazyQuotes {
					return nil, parseErr(pos, csv.ErrQuote)
				}
				// Hit end of line (copy all data so far).
				dst = append(dst, line)
				break parseField
			}
		}
	}
	return dst, nil
}
// validDelim reports whether r can serve as a field delimiter: it must
// be a valid rune and must not collide with the quote character, line
// endings, NUL, or the UTF-8 replacement rune.
func validDelim(r rune) bool {
	switch r {
	case 0, '"', '\r', '\n', utf8.RuneError:
		return false
	}
	return utf8.ValidRune(r)
}
// appendToLast concatenates s onto the final element of dst, which must
// be non-empty.
func appendToLast(dst []string, s string) {
	last := len(dst) - 1
	dst[last] = dst[last] + s
}
// nextRune returns the first rune of b; for an empty or invalidly
// encoded string it returns utf8.RuneError, matching DecodeRuneInString.
func nextRune(b string) rune {
	for _, r := range b {
		return r
	}
	return utf8.RuneError
}
// parseErr wraps err in a csv.ParseError pinned to line 1 (this package
// only parses single lines) with a 1-based column derived from pos.
func parseErr(pos int, err error) error {
	perr := &csv.ParseError{
		StartLine: 1,
		Line:      1,
		Err:       err,
	}
	perr.Column = pos + 1
	return perr
}

View File

@@ -0,0 +1,68 @@
variable "COVER_FILENAME" {
default = null
}
variable "BENCH_FILENAME" {
default = null
}
variable "GO_VERSION" {
default = null
}
target "default" {
targets = ["build"]
}
target "_all_platforms" {
platforms = [
"linux/amd64",
"linux/arm64",
"linux/arm/v7",
"linux/arm/v6",
"linux/386",
"linux/ppc64le",
"linux/s390x",
"darwin/amd64",
"darwin/arm64",
"windows/amd64",
]
}
target "build" {
output = ["type=cacheonly"]
args = {
GO_VERSION = GO_VERSION
}
}
target "build-all" {
inherits = ["build", "_all_platforms"]
}
target "test" {
target = "test"
args = {
COVER_FILENAME = COVER_FILENAME
GO_VERSION = GO_VERSION
}
output = [COVER_FILENAME!=null?".":"type=cacheonly"]
}
target "bench" {
target = "bench"
args = {
BENCH_FILENAME = BENCH_FILENAME
GO_VERSION = GO_VERSION
}
output = [BENCH_FILENAME!=null?".":"type=cacheonly"]
}
target "lint" {
dockerfile = "hack/dockerfiles/lint.Dockerfile"
output = ["type=cacheonly"]
}
target "lint-all" {
inherits = ["lint", "_all_platforms"]
}

44
vendor/github.com/tonistiigi/go-csvvalue/readme.md generated vendored Normal file
View File

@@ -0,0 +1,44 @@
### go-csvvalue
![GitHub Release](https://img.shields.io/github/v/release/tonistiigi/go-csvvalue)
[![Go Reference](https://pkg.go.dev/badge/github.com/tonistiigi/go-csvvalue.svg)](https://pkg.go.dev/github.com/tonistiigi/go-csvvalue)
![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/tonistiigi/go-csvvalue/ci.yml)
![Codecov](https://img.shields.io/codecov/c/github/tonistiigi/go-csvvalue)
![GitHub License](https://img.shields.io/github/license/tonistiigi/go-csvvalue)
`go-csvvalue` provides an efficient parser for a single-line CSV value.
It is more efficient than the standard library `encoding/csv` package for parsing many small values. The main problem with stdlib implementation is that it calls `bufio.NewReader` internally, allocating 4KB of memory on each invocation. For multi-line CSV parsing, the standard library is still recommended. If you wish to optimize memory usage for `encoding/csv`, call `csv.NewReader` with an instance of `*bufio.Reader` that already has a 4KB buffer allocated and then reuse that buffer for all reads.
For further memory optimization, an existing string slice can be optionally passed to be reused for returning the parsed fields.
For backwards compatibility with stdlib record parser, the input may contain a trailing newline character.
### Benchmark
```
goos: linux
goarch: amd64
pkg: github.com/tonistiigi/go-csvvalue
cpu: AMD EPYC 7763 64-Core Processor
BenchmarkFields/stdlib/withcache-4 1109917 1103 ns/op 4520 B/op 14 allocs/op
BenchmarkFields/stdlib/nocache-4 1082838 1125 ns/op 4520 B/op 14 allocs/op
BenchmarkFields/csvvalue/withcache-4 28554976 42.12 ns/op 0 B/op 0 allocs/op
BenchmarkFields/csvvalue/nocache-4 13666134 83.77 ns/op 48 B/op 1 allocs/op
```
```
goos: darwin
goarch: arm64
pkg: github.com/tonistiigi/go-csvvalue
BenchmarkFields/stdlib/nocache-10 1679923 784.9 ns/op 4520 B/op 14 allocs/op
BenchmarkFields/stdlib/withcache-10 1641891 826.9 ns/op 4520 B/op 14 allocs/op
BenchmarkFields/csvvalue/withcache-10 34399642 33.93 ns/op 0 B/op 0 allocs/op
BenchmarkFields/csvvalue/nocache-10 17441373 67.21 ns/op 48 B/op 1 allocs/op
PASS
```
### Credits
This package is mostly based on `encoding/csv` implementation and also uses that package for compatibility testing.

4
vendor/golang.org/x/time/LICENSE generated vendored
View File

@@ -1,4 +1,4 @@
Copyright (c) 2009 The Go Authors. All rights reserved. Copyright 2009 The Go Authors.
Redistribution and use in source and binary forms, with or without Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are modification, are permitted provided that the following conditions are
@@ -10,7 +10,7 @@ notice, this list of conditions and the following disclaimer.
copyright notice, this list of conditions and the following disclaimer copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the in the documentation and/or other materials provided with the
distribution. distribution.
* Neither the name of Google Inc. nor the names of its * Neither the name of Google LLC nor the names of its
contributors may be used to endorse or promote products derived from contributors may be used to endorse or promote products derived from
this software without specific prior written permission. this software without specific prior written permission.

View File

@@ -52,6 +52,8 @@ func Every(interval time.Duration) Limit {
// or its associated context.Context is canceled. // or its associated context.Context is canceled.
// //
// The methods AllowN, ReserveN, and WaitN consume n tokens. // The methods AllowN, ReserveN, and WaitN consume n tokens.
//
// Limiter is safe for simultaneous use by multiple goroutines.
type Limiter struct { type Limiter struct {
mu sync.Mutex mu sync.Mutex
limit Limit limit Limit

19
vendor/modules.txt vendored
View File

@@ -247,7 +247,7 @@ github.com/google/gnostic-models/openapiv3
## explicit; go 1.12 ## explicit; go 1.12
github.com/google/gofuzz github.com/google/gofuzz
github.com/google/gofuzz/bytesource github.com/google/gofuzz/bytesource
# github.com/google/pprof v0.0.0-20230323073829-e72429f035bd # github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6
## explicit; go 1.19 ## explicit; go 1.19
github.com/google/pprof/profile github.com/google/pprof/profile
# github.com/google/uuid v1.6.0 # github.com/google/uuid v1.6.0
@@ -346,10 +346,11 @@ github.com/mitchellh/go-homedir
# github.com/mitchellh/mapstructure v1.5.0 # github.com/mitchellh/mapstructure v1.5.0
## explicit; go 1.14 ## explicit; go 1.14
github.com/mitchellh/mapstructure github.com/mitchellh/mapstructure
# github.com/moby/buildkit v0.13.2 # github.com/moby/buildkit v0.17.3
## explicit; go 1.21 ## explicit; go 1.22.0
github.com/moby/buildkit/frontend/dockerfile/command github.com/moby/buildkit/frontend/dockerfile/command
github.com/moby/buildkit/frontend/dockerfile/instructions github.com/moby/buildkit/frontend/dockerfile/instructions
github.com/moby/buildkit/frontend/dockerfile/linter
github.com/moby/buildkit/frontend/dockerfile/parser github.com/moby/buildkit/frontend/dockerfile/parser
github.com/moby/buildkit/frontend/dockerfile/shell github.com/moby/buildkit/frontend/dockerfile/shell
github.com/moby/buildkit/util/stack github.com/moby/buildkit/util/stack
@@ -404,6 +405,9 @@ github.com/pkg/errors
# github.com/pkg/profile v1.7.0 # github.com/pkg/profile v1.7.0
## explicit; go 1.13 ## explicit; go 1.13
github.com/pkg/profile github.com/pkg/profile
# github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10
## explicit; go 1.20
github.com/planetscale/vtprotobuf/protohelpers
# github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 # github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
## explicit ## explicit
github.com/pmezard/go-difflib/difflib github.com/pmezard/go-difflib/difflib
@@ -439,6 +443,9 @@ github.com/stretchr/testify/require
# github.com/tidwall/pretty v1.2.1 # github.com/tidwall/pretty v1.2.1
## explicit; go 1.16 ## explicit; go 1.16
github.com/tidwall/pretty github.com/tidwall/pretty
# github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4
## explicit; go 1.16
github.com/tonistiigi/go-csvvalue
# github.com/vanng822/css v0.0.0-20190504095207-a21e860bcd04 # github.com/vanng822/css v0.0.0-20190504095207-a21e860bcd04
## explicit ## explicit
github.com/vanng822/css github.com/vanng822/css
@@ -485,7 +492,7 @@ golang.org/x/exp/maps
# golang.org/x/mod v0.22.0 # golang.org/x/mod v0.22.0
## explicit; go 1.22.0 ## explicit; go 1.22.0
golang.org/x/mod/semver golang.org/x/mod/semver
# golang.org/x/net v0.28.0 # golang.org/x/net v0.29.0
## explicit; go 1.18 ## explicit; go 1.18
golang.org/x/net/html golang.org/x/net/html
golang.org/x/net/html/atom golang.org/x/net/html/atom
@@ -527,8 +534,8 @@ golang.org/x/text/transform
golang.org/x/text/unicode/bidi golang.org/x/text/unicode/bidi
golang.org/x/text/unicode/norm golang.org/x/text/unicode/norm
golang.org/x/text/width golang.org/x/text/width
# golang.org/x/time v0.3.0 # golang.org/x/time v0.6.0
## explicit ## explicit; go 1.18
golang.org/x/time/rate golang.org/x/time/rate
# google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 # google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1
## explicit; go 1.21 ## explicit; go 1.21