Mirror of https://github.com/kubevela/kubevela.git (synced 2026-02-14 18:10:21 +00:00)
Chore: update cue version to 0.4.3 (#4425)
* Chore: update cue version to 0.4.3
* resolve some comments
* fix lint
* add more tests
* rebase and add more tests
* resolve comments

Signed-off-by: FogDong <dongtianxin.tx@alibaba-inc.com>
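Almost every template change in this commit applies the same migration: list comprehensions over optional parameter fields are now guarded with an explicit bottom check (`!= _|_`) so they are only evaluated when the field is actually provided. A minimal sketch of the pattern, written against the field names used in the webservice-style templates below (an illustration, not a verbatim excerpt of any one definition file):

    // Old style: iterate directly and rely on the *[...] | [] default
    // when parameter.volumeMounts is absent:
    //     pvc: *[ for v in parameter.volumeMounts.pvc {...} ] | []
    parameter: {
        volumeMounts?: {
            pvc?: [...{name: string, mountPath: string}]
        }
    }

    // New style: guard the optional fields before iterating.
    mountsArray: pvc: *[
        if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
            {name: v.name, mountPath: v.mountPath}
        },
    ] | []

The diff hunks below are reproduced from the commit with the viewer's line-number gutter stripped; removed lines are prefixed with `-`, added lines with `+`.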
@@ -222,8 +222,8 @@ spec:
 volumes?: [...{
 name: string
 mountPath: string
-// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir"
-type: "pvc" | "configMap" | "secret" | "emptyDir"
+// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir", default to emptyDir
+type: *"emptyDir" | "pvc" | "configMap" | "secret"
 if type == "pvc" {
 claimName: string
 }

@@ -17,7 +17,7 @@ spec:
 mountsArray: {
 pvc: *[
-for v in parameter.volumeMounts.pvc {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
 {
 mountPath: v.mountPath
 name: v.name

@@ -26,7 +26,7 @@ spec:
 ] | []
 configMap: *[
-for v in parameter.volumeMounts.configMap {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.configMap != _|_ for v in parameter.volumeMounts.configMap {
 {
 mountPath: v.mountPath
 name: v.name

@@ -35,7 +35,7 @@ spec:
 ] | []
 secret: *[
-for v in parameter.volumeMounts.secret {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.secret != _|_ for v in parameter.volumeMounts.secret {
 {
 mountPath: v.mountPath
 name: v.name

@@ -44,7 +44,7 @@ spec:
 ] | []
 emptyDir: *[
-for v in parameter.volumeMounts.emptyDir {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.emptyDir != _|_ for v in parameter.volumeMounts.emptyDir {
 {
 mountPath: v.mountPath
 name: v.name

@@ -53,7 +53,7 @@ spec:
 ] | []
 hostPath: *[
-for v in parameter.volumeMounts.hostPath {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.hostPath != _|_ for v in parameter.volumeMounts.hostPath {
 {
 mountPath: v.mountPath
 if v.mountPropagation != _|_ {

@@ -69,7 +69,7 @@ spec:
 }
 volumesArray: {
 pvc: *[
-for v in parameter.volumeMounts.pvc {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
 {
 name: v.name
 persistentVolumeClaim: claimName: v.claimName

@@ -78,7 +78,7 @@ spec:
 ] | []
 configMap: *[
-for v in parameter.volumeMounts.configMap {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.configMap != _|_ for v in parameter.volumeMounts.configMap {
 {
 name: v.name
 configMap: {

@@ -93,7 +93,7 @@ spec:
 ] | []
 secret: *[
-for v in parameter.volumeMounts.secret {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.secret != _|_ for v in parameter.volumeMounts.secret {
 {
 name: v.name
 secret: {

@@ -108,7 +108,7 @@ spec:
 ] | []
 emptyDir: *[
-for v in parameter.volumeMounts.emptyDir {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.emptyDir != _|_ for v in parameter.volumeMounts.emptyDir {
 {
 name: v.name
 emptyDir: medium: v.medium

@@ -117,7 +117,7 @@ spec:
 ] | []
 hostPath: *[
-for v in parameter.volumeMounts.hostPath {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.hostPath != _|_ for v in parameter.volumeMounts.hostPath {
 {
 name: v.name
 hostPath: path: v.path

@@ -275,7 +275,7 @@ spec:
 }
 }
 exposePorts: [
-for v in parameter.ports if v.expose == true {
+if parameter.ports != _|_ for v in parameter.ports if v.expose == true {
 port: v.port
 targetPort: v.port
 if v.name != _|_ {

@@ -428,8 +428,8 @@ spec:
 volumes?: [...{
 name: string
 mountPath: string
-// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir"
-type: "pvc" | "configMap" | "secret" | "emptyDir"
+// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir", default to emptyDir
+type: *"emptyDir" | "pvc" | "configMap" | "secret"
 if type == "pvc" {
 claimName: string
 }
@@ -21,9 +21,9 @@ spec:
 app: op.#Steps & {
 load: op.#Load @step(1)
 clusters: [...string]
+listClusters: op.#ListClusters @step(2)
 if parameter.clusters == _|_ {
-listClusters: op.#ListClusters @step(2)
-clusters: listClusters.outputs.clusters
+clusters: listClusters.outputs.clusters
 }
 if parameter.clusters != _|_ {
 clusters: parameter.clusters
@@ -94,11 +94,11 @@ spec:
 }
 status:
 customStatus: |-
-let igs = context.outputs.ingress.status.loadBalancer.ingress
-if igs == _|_ {
+if context.outputs.ingress.status.loadBalancer.ingress == _|_ {
 message: "No loadBalancer found, visiting by using 'vela port-forward " + context.appName + "'\n"
 }
-if len(igs) > 0 {
+if context.outputs.ingress.status.loadBalancer.ingress != _|_ {
+let igs = context.outputs.ingress.status.loadBalancer.ingress
-if igs[0].ip != _|_ {
+if igs[0].host != _|_ {
 message: "Visiting URL: " + context.outputs.ingress.spec.rules[0].host + ", IP: " + igs[0].ip
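The ingress status hunk above follows a related migration pattern: rather than binding `let igs` to a field that may be bottom and then calling `len(igs)` on it, the reworked status expression branches on the field itself and only introduces the `let` binding inside the branch where the value is known to exist. A rough sketch of that shape, reusing the names from the hunk (the surrounding trait definition is not shown here, and the exact final template may differ):

    // Sketch: branch on the possibly-absent field before binding or indexing it.
    if context.outputs.ingress.status.loadBalancer.ingress == _|_ {
        message: "No loadBalancer found, visiting by using 'vela port-forward " + context.appName + "'\n"
    }
    if context.outputs.ingress.status.loadBalancer.ingress != _|_ {
        let igs = context.outputs.ingress.status.loadBalancer.ingress
        if igs[0].host != _|_ {
            message: "Visiting URL: " + context.outputs.ingress.spec.rules[0].host + ", IP: " + igs[0].ip
        }
    }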
@@ -201,10 +201,20 @@ spec:
 verbatim?: bool
 }
 option: {
-text: textType
-value: string
-description?: textType
-url?: string
+text: {
+type: string
+text: string
+emoji?: bool
+verbatim?: bool
+}
+value: string
+description?: {
+type: string
+text: string
+emoji?: bool
+verbatim?: bool
+}
+url?: string
 }
 // send webhook notification
 ding: op.#Steps & {
@@ -41,8 +41,8 @@ spec:
 }
 // +patchStrategy=retainKeys
 patch: spec: template: spec: serviceAccountName: parameter.name
-_clusterPrivileges: [ for p in parameter.privileges if p.scope == "cluster" {p}]
-_namespacePrivileges: [ for p in parameter.privileges if p.scope == "namespace" {p}]
+_clusterPrivileges: [ if parameter.privileges != _|_ for p in parameter.privileges if p.scope == "cluster" {p}]
+_namespacePrivileges: [ if parameter.privileges != _|_ for p in parameter.privileges if p.scope == "namespace" {p}]
 outputs: {
 if parameter.create {
 "service-account": {
@@ -18,7 +18,7 @@ spec:
 cue:
 template: |
 pvcVolumesList: *[
-for v in parameter.pvc {
+if parameter.pvc != _|_ for v in parameter.pvc {
 {
 name: "pvc-" + v.name
 persistentVolumeClaim: claimName: v.name

@@ -26,7 +26,7 @@ spec:
 },
 ] | []
 configMapVolumesList: *[
-for v in parameter.configMap if v.mountPath != _|_ {
+if parameter.configMap != _|_ for v in parameter.configMap if v.mountPath != _|_ {
 {
 name: "configmap-" + v.name
 configMap: {

@@ -40,7 +40,7 @@ spec:
 },
 ] | []
 secretVolumesList: *[
-for v in parameter.secret if v.mountPath != _|_ {
+if parameter.secret != _|_ for v in parameter.secret if v.mountPath != _|_ {
 {
 name: "secret-" + v.name
 secret: {

@@ -54,7 +54,7 @@ spec:
 },
 ] | []
 emptyDirVolumesList: *[
-for v in parameter.emptyDir {
+if parameter.emptyDir != _|_ for v in parameter.emptyDir {
 {
 name: "emptydir-" + v.name
 emptyDir: medium: v.medium

@@ -62,7 +62,7 @@ spec:
 },
 ] | []
 pvcVolumeMountsList: *[
-for v in parameter.pvc {
+if parameter.pvc != _|_ for v in parameter.pvc {
 if v.volumeMode == "Filesystem" {
 {
 name: "pvc-" + v.name

@@ -75,7 +75,7 @@ spec:
 },
 ] | []
 configMapVolumeMountsList: *[
-for v in parameter.configMap if v.mountPath != _|_ {
+if parameter.configMap != _|_ for v in parameter.configMap if v.mountPath != _|_ {
 {
 name: "configmap-" + v.name
 mountPath: v.mountPath

@@ -86,7 +86,7 @@ spec:
 },
 ] | []
 configMapEnvMountsList: *[
-for v in parameter.configMap if v.mountToEnv != _|_ {
+if parameter.configMap != _|_ for v in parameter.configMap if v.mountToEnv != _|_ {
 {
 name: v.mountToEnv.envName
 valueFrom: configMapKeyRef: {

@@ -97,7 +97,7 @@ spec:
 },
 ] | []
 configMountToEnvsList: *[
-for v in parameter.configMap if v.mountToEnvs != _|_ for k in v.mountToEnvs {
+if parameter.configMap != _|_ for v in parameter.configMap if v.mountToEnvs != _|_ for k in v.mountToEnvs {
 {
 name: k.envName
 valueFrom: configMapKeyRef: {

@@ -108,7 +108,7 @@ spec:
 },
 ] | []
 secretVolumeMountsList: *[
-for v in parameter.secret if v.mountPath != _|_ {
+if parameter.secret != _|_ for v in parameter.secret if v.mountPath != _|_ {
 {
 name: "secret-" + v.name
 mountPath: v.mountPath

@@ -119,7 +119,7 @@ spec:
 },
 ] | []
 secretEnvMountsList: *[
-for v in parameter.secret if v.mountToEnv != _|_ {
+if parameter.secret != _|_ if parameter.secret != _|_ for v in parameter.secret if v.mountToEnv != _|_ {
 {
 name: v.mountToEnv.envName
 valueFrom: secretKeyRef: {

@@ -130,7 +130,7 @@ spec:
 },
 ] | []
 secretMountToEnvsList: *[
-for v in parameter.secret if v.mountToEnvs != _|_ for k in v.mountToEnvs {
+if parameter.secret != _|_ for v in parameter.secret if v.mountToEnvs != _|_ for k in v.mountToEnvs {
 {
 name: k.envName
 valueFrom: secretKeyRef: {

@@ -141,7 +141,7 @@ spec:
 },
 ] | []
 emptyDirVolumeMountsList: *[
-for v in parameter.emptyDir {
+if parameter.emptyDir != _|_ for v in parameter.emptyDir {
 {
 name: "emptydir-" + v.name
 mountPath: v.mountPath

@@ -152,7 +152,7 @@ spec:
 },
 ] | []
 volumeDevicesList: *[
-for v in parameter.pvc if v.volumeMode == "Block" {
+if parameter.pvc != _|_ for v in parameter.pvc if v.volumeMode == "Block" {
 {
 name: "pvc-" + v.name
 devicePath: v.mountPath

@@ -190,7 +190,7 @@ spec:
 }
 outputs: {
-for v in parameter.pvc {
+if parameter.pvc != _|_ for v in parameter.pvc {
 if v.mountOnly == false {
 "pvc-\(v.name)": {
 apiVersion: "v1"

@@ -229,7 +229,7 @@ spec:
 }
 }
-for v in parameter.configMap {
+if parameter.configMap != _|_ for v in parameter.configMap {
 if v.mountOnly == false {
 "configmap-\(v.name)": {
 apiVersion: "v1"

@@ -242,7 +242,7 @@ spec:
 }
 }
-for v in parameter.secret {
+if parameter.secret != _|_ for v in parameter.secret {
 if v.mountOnly == false {
 "secret-\(v.name)": {
 apiVersion: "v1"
@@ -63,7 +63,7 @@ spec:
 }
 if parameter["volumes"] != _|_ {
-volumeMounts: [ for v in parameter.volumes {
+volumeMounts: [ if parameter.volumes != _|_ for v in parameter.volumes {
 {
 mountPath: v.mountPath
 name: v.name

@@ -72,7 +72,7 @@ spec:
 }]
 if parameter["volumes"] != _|_ {
-volumes: [ for v in parameter.volumes {
+volumes: [ if parameter.volumes != _|_ for v in parameter.volumes {
 {
 name: v.name
 if v.type == "pvc" {

@@ -103,7 +103,7 @@ spec:
 }
 if parameter["imagePullSecrets"] != _|_ {
-imagePullSecrets: [ for v in parameter.imagePullSecrets {
+imagePullSecrets: [ if parameter.imagePullSecrets != _|_ for v in parameter.imagePullSecrets {
 name: v
 },
 ]
@@ -175,8 +175,8 @@ spec:
 volumes?: [...{
 name: string
 mountPath: string
-// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir"
-type: "pvc" | "configMap" | "secret" | "emptyDir"
+// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir", default to emptyDir
+type: *"emptyDir" | "pvc" | "configMap" | "secret"
 if type == "pvc" {
 claimName: string
 }

@@ -54,8 +54,8 @@ spec:
 // +usage=Declare volumes and volumeMounts
 volumes?: [...{
 name: string
-// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir"
-type: "pvc" | "configMap" | "secret" | "emptyDir"
+// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir", default to emptyDir
+type: *"emptyDir" | "pvc" | "configMap" | "secret"
 if type == "pvc" {
 claimName: string
 }
@@ -133,10 +133,20 @@ spec:
 verbatim?: bool
 }
 option: {
-text: textType
-value: string
-description?: textType
-url?: string
+text: {
+type: string
+text: string
+emoji?: bool
+verbatim?: bool
+}
+value: string
+description?: {
+type: string
+text: string
+emoji?: bool
+verbatim?: bool
+}
+url?: string
 }
 secretRef: {
 name: string
@@ -17,7 +17,7 @@ spec:
 mountsArray: {
 pvc: *[
-for v in parameter.volumeMounts.pvc {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
 {
 mountPath: v.mountPath
 if v.subPath != _|_ {

@@ -29,7 +29,7 @@ spec:
 ] | []
 configMap: *[
-for v in parameter.volumeMounts.configMap {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.configMap != _|_ for v in parameter.volumeMounts.configMap {
 {
 mountPath: v.mountPath
 if v.subPath != _|_ {

@@ -41,7 +41,7 @@ spec:
 ] | []
 secret: *[
-for v in parameter.volumeMounts.secret {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.secret != _|_ for v in parameter.volumeMounts.secret {
 {
 mountPath: v.mountPath
 if v.subPath != _|_ {

@@ -53,7 +53,7 @@ spec:
 ] | []
 emptyDir: *[
-for v in parameter.volumeMounts.emptyDir {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.emptyDir != _|_ for v in parameter.volumeMounts.emptyDir {
 {
 mountPath: v.mountPath
 if v.subPath != _|_ {

@@ -65,7 +65,7 @@ spec:
 ] | []
 hostPath: *[
-for v in parameter.volumeMounts.hostPath {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.hostPath != _|_ for v in parameter.volumeMounts.hostPath {
 {
 mountPath: v.mountPath
 if v.subPath != _|_ {

@@ -78,7 +78,7 @@ spec:
 }
 volumesArray: {
 pvc: *[
-for v in parameter.volumeMounts.pvc {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
 {
 name: v.name
 persistentVolumeClaim: claimName: v.claimName

@@ -87,7 +87,7 @@ spec:
 ] | []
 configMap: *[
-for v in parameter.volumeMounts.configMap {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.configMap != _|_ for v in parameter.volumeMounts.configMap {
 {
 name: v.name
 configMap: {

@@ -102,7 +102,7 @@ spec:
 ] | []
 secret: *[
-for v in parameter.volumeMounts.secret {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.secret != _|_ for v in parameter.volumeMounts.secret {
 {
 name: v.name
 secret: {

@@ -117,7 +117,7 @@ spec:
 ] | []
 emptyDir: *[
-for v in parameter.volumeMounts.emptyDir {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.emptyDir != _|_ for v in parameter.volumeMounts.emptyDir {
 {
 name: v.name
 emptyDir: medium: v.medium

@@ -126,7 +126,7 @@ spec:
 ] | []
 hostPath: *[
-for v in parameter.volumeMounts.hostPath {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.hostPath != _|_ for v in parameter.volumeMounts.hostPath {
 {
 name: v.name
 hostPath: path: v.path
@@ -139,11 +139,11 @@ spec:
 for val in [
 for i, vi in volumesList {
 for j, vj in volumesList if j < i && vi.name == vj.name {
-_ignore: true
+ignore: true
 }
 vi
 },
-] if val._ignore == _|_ {
+] if val.ignore == _|_ {
 val
 },
 ]
@@ -297,7 +297,7 @@ spec:
 }
 }
 exposePorts: [
-for v in parameter.ports if v.expose == true {
+if parameter.ports != _|_ for v in parameter.ports if v.expose == true {
 port: v.port
 targetPort: v.port
 if v.name != _|_ {

@@ -453,8 +453,8 @@ spec:
 volumes?: [...{
 name: string
 mountPath: string
-// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir"
-type: "pvc" | "configMap" | "secret" | "emptyDir"
+// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir", default to emptyDir
+type: *"emptyDir" | "pvc" | "configMap" | "secret"
 if type == "pvc" {
 claimName: string
 }
@@ -15,7 +15,7 @@ spec:
 template: |
 mountsArray: {
 pvc: *[
-for v in parameter.volumeMounts.pvc {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
 {
 mountPath: v.mountPath
 name: v.name

@@ -24,7 +24,7 @@ spec:
 ] | []
 configMap: *[
-for v in parameter.volumeMounts.configMap {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.configMap != _|_ for v in parameter.volumeMounts.configMap {
 {
 mountPath: v.mountPath
 name: v.name

@@ -33,7 +33,7 @@ spec:
 ] | []
 secret: *[
-for v in parameter.volumeMounts.secret {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.secret != _|_ for v in parameter.volumeMounts.secret {
 {
 mountPath: v.mountPath
 name: v.name

@@ -42,7 +42,7 @@ spec:
 ] | []
 emptyDir: *[
-for v in parameter.volumeMounts.emptyDir {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.emptyDir != _|_ for v in parameter.volumeMounts.emptyDir {
 {
 mountPath: v.mountPath
 name: v.name

@@ -51,7 +51,7 @@ spec:
 ] | []
 hostPath: *[
-for v in parameter.volumeMounts.hostPath {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.hostPath != _|_ for v in parameter.volumeMounts.hostPath {
 {
 mountPath: v.mountPath
 name: v.name

@@ -61,7 +61,7 @@ spec:
 }
 volumesArray: {
 pvc: *[
-for v in parameter.volumeMounts.pvc {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.pvc != _|_ for v in parameter.volumeMounts.pvc {
 {
 name: v.name
 persistentVolumeClaim: claimName: v.claimName

@@ -70,7 +70,7 @@ spec:
 ] | []
 configMap: *[
-for v in parameter.volumeMounts.configMap {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.configMap != _|_ for v in parameter.volumeMounts.configMap {
 {
 name: v.name
 configMap: {

@@ -85,7 +85,7 @@ spec:
 ] | []
 secret: *[
-for v in parameter.volumeMounts.secret {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.secret != _|_ for v in parameter.volumeMounts.secret {
 {
 name: v.name
 secret: {

@@ -100,7 +100,7 @@ spec:
 ] | []
 emptyDir: *[
-for v in parameter.volumeMounts.emptyDir {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.emptyDir != _|_ for v in parameter.volumeMounts.emptyDir {
 {
 name: v.name
 emptyDir: medium: v.medium

@@ -109,7 +109,7 @@ spec:
 ] | []
 hostPath: *[
-for v in parameter.volumeMounts.hostPath {
+if parameter.volumeMounts != _|_ && parameter.volumeMounts.hostPath != _|_ for v in parameter.volumeMounts.hostPath {
 {
 name: v.name
 hostPath: path: v.path

@@ -322,8 +322,8 @@ spec:
 volumes?: [...{
 name: string
 mountPath: string
-// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir"
-type: "pvc" | "configMap" | "secret" | "emptyDir"
+// +usage=Specify volume type, options: "pvc","configMap","secret","emptyDir", default to emptyDir
+type: *"emptyDir" | "pvc" | "configMap" | "secret"
 if type == "pvc" {
 claimName: string
 }
@@ -984,10 +984,10 @@ var showCdResult = `# Specification
 +---------+--------------------------------------------------------------------------------------------------+----------+----------+---------+
 | NAME | DESCRIPTION | TYPE | REQUIRED | DEFAULT |
 +---------+--------------------------------------------------------------------------------------------------+----------+----------+---------+
-| cmd | Commands to run in the container. | []string | false | |
 | count | specify number of tasks to run in parallel. | int | false | 1 |
-| restart | Define the job restart policy, the value can only be Never or OnFailure. By default, it's Never. | string | false | Never |
 | image | Which image would you like to use for your service. | string | true | |
+| restart | Define the job restart policy, the value can only be Never or OnFailure. By default, it's Never. | string | false | Never |
+| cmd | Commands to run in the container. | []string | false | |
 +---------+--------------------------------------------------------------------------------------------------+----------+----------+---------+
go.mod (12 changed lines)
@@ -3,7 +3,7 @@ module github.com/oam-dev/kubevela
 go 1.17

 require (
-cuelang.org/go v0.2.2
+cuelang.org/go v0.4.4-0.20220729051708-0a46a1624353
 github.com/AlecAivazis/survey/v2 v2.1.1
 github.com/FogDong/uitable v0.0.5
 github.com/Masterminds/semver/v3 v3.1.1

@@ -83,7 +83,7 @@ require (
 gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
 gopkg.in/src-d/go-git.v4 v4.13.1
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
+gopkg.in/yaml.v3 v3.0.1
 gotest.tools v2.2.0+incompatible
 helm.sh/helm/v3 v3.7.2
 istio.io/client-go v1.13.4

@@ -124,7 +124,7 @@ require (
 sigs.k8s.io/gateway-api v0.4.3
 )

-require github.com/rogpeppe/go-internal v1.8.0
+require github.com/rogpeppe/go-internal v1.8.1

 require (
 cloud.google.com/go/compute v1.7.0 // indirect

@@ -175,7 +175,7 @@ require (
 github.com/docker/go-metrics v0.0.1 // indirect
 github.com/docker/go-units v0.4.0 // indirect
 github.com/emicklei/go-restful v2.9.5+incompatible // indirect
-github.com/emicklei/proto v1.6.15 // indirect
+github.com/emicklei/proto v1.10.0 // indirect
 github.com/emirpasic/gods v1.12.0 // indirect
 github.com/evanphx/json-patch/v5 v5.1.0 // indirect
 github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect

@@ -197,6 +197,7 @@ require (
 github.com/gobuffalo/flect v0.2.3 // indirect
 github.com/gobwas/glob v0.2.3 // indirect
 github.com/gogo/protobuf v1.3.2 // indirect
+github.com/golang/glog v1.0.0 // indirect
 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
 github.com/golang/protobuf v1.5.2 // indirect
 github.com/golang/snappy v0.0.3 // indirect

@@ -235,7 +236,7 @@ require (
 github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b // indirect
 github.com/mitchellh/copystructure v1.2.0 // indirect
 github.com/mitchellh/go-homedir v1.1.0 // indirect
-github.com/mitchellh/go-wordwrap v1.0.0 // indirect
+github.com/mitchellh/go-wordwrap v1.0.1 // indirect
 github.com/mitchellh/reflectwalk v1.0.2 // indirect
 github.com/moby/locker v1.0.1 // indirect
 github.com/moby/spdystream v0.2.0 // indirect

@@ -258,6 +259,7 @@ require (
 github.com/prometheus/client_model v0.2.0 // indirect
 github.com/prometheus/common v0.28.0 // indirect
 github.com/prometheus/procfs v0.6.0 // indirect
+github.com/protocolbuffers/txtpbfmt v0.0.0-20220428173112-74888fd59c2b // indirect
 github.com/rubenv/sql-migrate v0.0.0-20210614095031-55d5740dbbcc // indirect
 github.com/russross/blackfriday v1.6.0 // indirect
 github.com/russross/blackfriday/v2 v2.1.0 // indirect
go.sum (23 changed lines)
@@ -72,8 +72,8 @@ cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq
 collectd.org v0.3.0/go.mod h1:A/8DzQBkF6abtvrT2j/AU/4tiBgJWYyh0y/oB/4MlWE=
 contrib.go.opencensus.io/exporter/ocagent v0.6.0/go.mod h1:zmKjrJcdo0aYcVS7bmEeSEBLPA9YJp5bjrofdU3pIXs=
 contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc=
-cuelang.org/go v0.2.2 h1:i/wFo48WDibGHKQTRZ08nB8PqmGpVpQ2sRflZPj73nQ=
-cuelang.org/go v0.2.2/go.mod h1:Dyjk8Y/B3CfFT1jQKJU0g5PpCeMiDe0yMOhk57oXwqo=
+cuelang.org/go v0.4.4-0.20220729051708-0a46a1624353 h1:zKp5hMLvsOulekPnhK2HaXKeXBTTfSzy209Yc01DPD8=
+cuelang.org/go v0.4.4-0.20220729051708-0a46a1624353/go.mod h1:LGl1HbaGIFxblk2o2nM53YSW5KN3jmjh4c5jpHMs7rc=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
 github.com/360EntSecGroup-Skylar/excelize v1.4.1/go.mod h1:vnax29X2usfl7HHkBrX5EvSCJcmH3dT9luvxzu8iGAE=
 github.com/AlecAivazis/survey/v2 v2.1.1 h1:LEMbHE0pLj75faaVEKClEX1TM4AJmmnOh9eimREzLWI=

@@ -633,8 +633,8 @@ github.com/emicklei/go-restful-openapi/v2 v2.3.0/go.mod h1:bs67E3SEVgSmB3qDuRLqp
 github.com/emicklei/go-restful/v3 v3.0.0-rc2/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
 github.com/emicklei/go-restful/v3 v3.8.0 h1:eCZ8ulSerjdAiaNpF7GxXIE7ZCMo1moN1qX+S609eVw=
 github.com/emicklei/go-restful/v3 v3.8.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
-github.com/emicklei/proto v1.6.15 h1:XbpwxmuOPrdES97FrSfpyy67SSCV/wBIKXqgJzh6hNw=
-github.com/emicklei/proto v1.6.15/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
+github.com/emicklei/proto v1.10.0 h1:pDGyFRVV5RvV+nkBK9iy3q67FBy9Xa7vwrOTE+g5aGw=
+github.com/emicklei/proto v1.10.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
 github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
 github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
 github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=

@@ -937,6 +937,7 @@ github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2V
 github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
 github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ=
 github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4=
 github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=

@@ -1505,8 +1506,9 @@ github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrk
 github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg=
 github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
 github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
-github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4=
 github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
+github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
+github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
 github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
 github.com/mitchellh/hashstructure v0.0.0-20170609045927-2bca23e0e452 h1:hOY53G+kBFhbYFpRVxHl5eS7laP6B1+Cq+Z9Dry1iMU=
 github.com/mitchellh/hashstructure v0.0.0-20170609045927-2bca23e0e452/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ=

@@ -1821,6 +1823,8 @@ github.com/prometheus/prometheus v1.8.2-0.20200110114423-1e64d757f711/go.mod h1:
 github.com/prometheus/prometheus v1.8.2-0.20200507164740-ecee9c8abfd1/go.mod h1:S5n0C6tSgdnwWshBUceRx5G1OsjLv/EeZ9t3wIfEtsY=
 github.com/prometheus/prometheus v1.8.2-0.20200609102542-5d7e3e970602/go.mod h1:CwaXafRa0mm72de2GQWtfQxjGytbSKIGivWxQvjpRZs=
 github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
+github.com/protocolbuffers/txtpbfmt v0.0.0-20220428173112-74888fd59c2b h1:zd/2RNzIRkoGGMjE+YIsZ85CnDIz672JK2F3Zl4vux4=
+github.com/protocolbuffers/txtpbfmt v0.0.0-20220428173112-74888fd59c2b/go.mod h1:KjY0wibdYKc4DYkerHSbguaf3JeIPGhNJBp2BNiFH78=
 github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA=
 github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q=
 github.com/qri-io/starlib v0.4.2-0.20200213133954-ff2e8cd5ef8d/go.mod h1:7DPO4domFU579Ga6E61sB9VFNaniPVwJP5C4bBCu3wA=

@@ -1850,11 +1854,11 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
 github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/rogpeppe/go-internal v1.6.0/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
-github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
-github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg=
+github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
 github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
 github.com/rubenv/sql-migrate v0.0.0-20210614095031-55d5740dbbcc h1:BD7uZqkN8CpjJtN/tScAKiccBikU4dlqe/gNrkRaPY4=

@@ -2293,7 +2297,6 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0
 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
 golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
 golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw=
-golang.org/x/exp v0.0.0-20200513190911-00229845015e/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw=
 golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
 golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
 golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=

@@ -2728,7 +2731,6 @@ golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roY
 golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200603131246-cc40288be839/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200612220849-54c614fe050c/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200616133436-c1934b75d054/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=

@@ -3077,8 +3079,9 @@ gopkg.in/yaml.v3 v3.0.0-20200121175148-a6ecf24a6d71/go.mod h1:K4uyk7z7BCEPqu6E+C
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.0-20200603094226-e3079894b1e8/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
 gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
 gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
@@ -589,6 +589,7 @@ func unmarshalToContent(content []byte) (fileContent *github.RepositoryContent,
|
||||
return nil, nil, fmt.Errorf("unmarshalling failed for both file and directory content: %s and %w", fileUnmarshalError, directoryUnmarshalError)
|
||||
}
|
||||
|
||||
// nolint:staticcheck
|
||||
func genAddonAPISchema(addonRes *UIData) error {
|
||||
param, err := utils2.PrepareParameterCue(addonRes.Name, addonRes.Parameters)
|
||||
if err != nil {
|
||||
|
||||
@@ -184,7 +184,7 @@ func testReaderFunc(t *testing.T, reader AsyncReader) {
// test get ui data
rName := "KubeVela"
uiDataList, err := ListAddonUIDataFromReader(reader, registryMeta, rName, UIMetaOptions)
assert.True(t, strings.Contains(err.Error(), "#parameter.example: preference mark not allowed at this position"))
assert.True(t, strings.Contains(err.Error(), "preference mark not allowed at this position"))
assert.Equal(t, 5, len(uiDataList))
assert.Equal(t, uiDataList[0].RegistryName, rName)

@@ -238,6 +238,7 @@ func (cmd *InitCmd) createURLComponent() error {
}

// toCUEResourceString formats object to CUE string used in addons
// nolint:staticcheck
func toCUEResourceString(obj interface{}) (string, error) {
r := cue.Runtime{}
v, err := gocodec.New(&r, nil).Decode(obj)

@@ -41,6 +41,7 @@ import (
"github.com/oam-dev/kubevela/pkg/oam"
"github.com/oam-dev/kubevela/pkg/oam/util"
addonutil "github.com/oam-dev/kubevela/pkg/utils/addon"
verrors "github.com/oam-dev/kubevela/pkg/utils/errors"
)

const (
@@ -133,6 +134,10 @@ func (a addonCueTemplateRender) renderApp() (*v1beta1.Application, []*unstructur
if err != nil {
return nil, nil, errors.Wrap(err, "load app template with CUE files")
}
if v.Error() != nil {
return nil, nil, errors.Wrap(v.Error(), "load app template with CUE files")
}

outputContent, err := v.LookupValue(renderOutputCuePath)
if err != nil {
return nil, nil, errors.Wrap(err, "render app from output field from CUE")
@@ -144,7 +149,7 @@ func (a addonCueTemplateRender) renderApp() (*v1beta1.Application, []*unstructur
auxiliaryContent, err := v.LookupValue(renderAuxiliaryOutputsPath)
if err != nil {
// no outputs defined in app template, return normal data
if isErrorCueRenderPathNotFound(err, renderAuxiliaryOutputsPath) {
if verrors.IsCuePathNotFound(err) {
return &app, res, nil
}
return nil, nil, errors.Wrap(err, "render app from output field from CUE")

@@ -97,7 +97,7 @@ myref: {
},
}
app, _, err := render.renderApp()
assert.Equal(t, err.Error(), `load app template with CUE files: reference "myref" not found`)
assert.Equal(t, err.Error(), `load app template with CUE files: output.spec.components: reference "myref" not found`)
assert.Nil(t, app)

addon.CUETemplates = []ElementFile{{Data: "package main\n" + resourceComponent1}}
@@ -114,7 +114,7 @@ myref: {
assert.Equal(t, len(app.Spec.Policies), 2)
str, err = json.Marshal(app.Spec.Policies)
assert.NoError(t, err)
assert.True(t, strings.Contains(string(str), `"clusterLabelSelector":{}`))
assert.Contains(t, string(str), `"clusterLabelSelector":{}`)

addon.Parameters = "package newp\n" + paraDefined
addon.CUETemplates = []ElementFile{{Data: "package newp\n" + resourceComponent1}}
@@ -133,13 +133,13 @@ myref: {
addon.CUETemplates = []ElementFile{{Data: "package hello\n" + resourceComponent1}}
addon.AppCueTemplate = ElementFile{Data: "package main\n" + appTemplate}
_, _, err = render.renderApp()
assert.Equal(t, err.Error(), `load app template with CUE files: reference "myref" not found`)
assert.Equal(t, err.Error(), `load app template with CUE files: output.spec.components: reference "myref" not found`)

addon.CUETemplates = []ElementFile{{Data: "package hello\n" + resourceComponent1}}
addon.Parameters = paraDefined
addon.AppCueTemplate = ElementFile{Data: appTemplate}
_, _, err = render.renderApp()
assert.Equal(t, err.Error(), `load app template with CUE files: reference "myref" not found`)
assert.Equal(t, err.Error(), `load app template with CUE files: output.spec.components: reference "myref" not found`)

}

@@ -441,10 +441,6 @@ func generateAnnotation(meta *Meta) map[string]string {
return res
}

func isErrorCueRenderPathNotFound(err error, path string) bool {
return err.Error() == fmt.Sprintf("var(path=%s) not exist", path)
}

func checkConflictDefs(ctx context.Context, k8sClient client.Client, defs []*unstructured.Unstructured, appName string) (map[string]string, error) {
res := map[string]string{}
for _, def := range defs {

@@ -23,7 +23,7 @@ import (
"reflect"
"strings"

"cuelang.org/go/cue"
"cuelang.org/go/cue/cuecontext"
"cuelang.org/go/cue/format"
json2cue "cuelang.org/go/encoding/json"
"github.com/crossplane/crossplane-runtime/pkg/fieldpath"
@@ -499,21 +499,13 @@ func baseGenerateComponent(pCtx process.Context, wl *Workload, appName, ns strin
if patcher := wl.Patch; patcher != nil {
workload, auxiliaries := pCtx.Output()
if p, err := patcher.LookupValue("workload"); err == nil {
pi, err := model.NewOther(p.CueValue())
if err != nil {
return nil, errors.WithMessage(err, "patch workload")
}
if err := workload.Unify(pi); err != nil {
if err := workload.Unify(p.CueValue()); err != nil {
return nil, errors.WithMessage(err, "patch workload")
}
}
for _, aux := range auxiliaries {
if p, err := patcher.LookupByScript(fmt.Sprintf("traits[\"%s\"]", aux.Name)); err == nil && p.CueValue().Err() == nil {
pi, err := model.NewOther(p.CueValue())
if err != nil {
return nil, errors.WithMessagef(err, "patch outputs.%s", aux.Name)
}
if err := aux.Ins.Unify(pi); err != nil {
if err := aux.Ins.Unify(p.CueValue()); err != nil {
return nil, errors.WithMessagef(err, "patch outputs.%s", aux.Name)
}
}
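Reviewer note: the hunk above stops wrapping the patch value in model.NewOther and unifies the raw cue.Value directly. A minimal, hedged sketch of what Unify does in cue v0.4 (standalone illustration, not this repository's code):

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	ctx := cuecontext.New()
	base := ctx.CompileString(`spec: replicas: int`)
	patch := ctx.CompileString(`spec: replicas: 3`)
	// Unify merges both values; a conflicting patch shows up as an error on the result.
	merged := base.Unify(patch)
	if err := merged.Err(); err != nil {
		fmt.Println("patch conflict:", err)
		return
	}
	n, _ := merged.LookupPath(cue.ParsePath("spec.replicas")).Int64()
	fmt.Println(n) // 3
}
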
@@ -618,20 +610,20 @@ func GenerateCUETemplate(wl *Workload) (string, error) {
if err != nil {
return templateStr, errors.Wrap(err, "cannot marshal kube object")
}
ins, err := json2cue.Decode(&cue.Runtime{}, "", objRaw)
cuectx := cuecontext.New()
expr, err := json2cue.Extract("", objRaw)
if err != nil {
return templateStr, errors.Wrap(err, "cannot decode object into CUE")
return templateStr, errors.Wrap(err, "cannot extract object into CUE")
}
cueRaw, err := format.Node(ins.Value().Syntax())
v := cuectx.BuildExpr(expr)
cueRaw, err := format.Node(v.Syntax())
if err != nil {
return templateStr, errors.Wrap(err, "cannot format CUE")
}

// NOTE a hack way to enable using CUE capabilities on KUBE schematic workload
templateStr = fmt.Sprintf(`
output: {
%s
}`, string(cueRaw))
output: %s`, string(cueRaw))
case types.HelmCategory:
gv, err := schema.ParseGroupVersion(wl.FullTemplate.Reference.Definition.APIVersion)
if err != nil {

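Note on the hunk above: GenerateCUETemplate now builds the CUE text through encoding/json's Extract and a cuecontext instead of the deprecated json2cue.Decode on a cue.Runtime. A small sketch of that round-trip under cue v0.4.x (the JSON payload here is invented for illustration):

package main

import (
	"fmt"

	"cuelang.org/go/cue/cuecontext"
	"cuelang.org/go/cue/format"
	json2cue "cuelang.org/go/encoding/json"
)

func main() {
	objRaw := []byte(`{"apiVersion": "apps/v1", "kind": "Deployment"}`)
	expr, err := json2cue.Extract("", objRaw) // JSON -> CUE AST expression
	if err != nil {
		panic(err)
	}
	v := cuecontext.New().BuildExpr(expr)  // AST expression -> cue.Value
	cueRaw, err := format.Node(v.Syntax()) // cue.Value -> formatted CUE source
	if err != nil {
		panic(err)
	}
	fmt.Printf("output: %s\n", cueRaw)
}
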
@@ -22,7 +22,7 @@ import (
"fmt"
"testing"

"cuelang.org/go/cue"
"cuelang.org/go/cue/cuecontext"
"github.com/crossplane/crossplane-runtime/pkg/test"
"github.com/google/go-cmp/cmp"
terraformtypes "github.com/oam-dev/terraform-controller/api/types/crossplane-runtime"
@@ -1178,33 +1178,32 @@ spec:
|
||||
CapabilityCategory: oamtypes.KubeCategory,
|
||||
},
|
||||
expectData: `
|
||||
output: {
|
||||
apiVersion: "apps/v1"
|
||||
kind: "Deployment"
|
||||
spec: {
|
||||
selector: {
|
||||
matchLabels: {
|
||||
app: "nginx"
|
||||
}
|
||||
}
|
||||
template: {
|
||||
spec: {
|
||||
containers: [{
|
||||
name: "nginx"
|
||||
image: "nginx:1.14.0"
|
||||
}]
|
||||
ports: [{
|
||||
containerPort: 80
|
||||
}]
|
||||
}
|
||||
metadata: {
|
||||
labels: {
|
||||
output: {
|
||||
apiVersion: "apps/v1"
|
||||
kind: "Deployment"
|
||||
spec: {
|
||||
selector: {
|
||||
matchLabels: {
|
||||
app: "nginx"
|
||||
}
|
||||
}
|
||||
template: {
|
||||
metadata: {
|
||||
labels: {
|
||||
app: "nginx"
|
||||
}
|
||||
}
|
||||
spec: {
|
||||
containers: [{
|
||||
image: "nginx:1.14.0"
|
||||
name: "nginx"
|
||||
}]
|
||||
ports: [{
|
||||
containerPort: 80
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}`,
|
||||
hasError: false,
|
||||
}, "Kube workload with wrong template": {
|
||||
@@ -1297,14 +1296,14 @@ output: {
errInfo: "unexpected GroupVersion string: app@//v1",
}}

for _, tc := range testcases {
for i, tc := range testcases {
template, err := GenerateCUETemplate(tc.workload)
assert.Equal(t, err != nil, tc.hasError)
if tc.hasError {
assert.Equal(t, tc.errInfo, err.Error())
continue
}
assert.Equal(t, tc.expectData, template)
assert.Equal(t, tc.expectData, template, i)
}
}

@@ -1395,11 +1394,9 @@ func TestBaseGenerateComponent(t *testing.T) {
}
}
`
var r cue.Runtime
inst, err := r.Compile("-", base)
assert.NilError(t, err)
inst := cuecontext.New().CompileString(base)
bs, _ := model.NewBase(inst.Value())
err = pContext.SetBase(bs)
err := pContext.SetBase(bs)
assert.NilError(t, err)
tr := &Trait{
Name: traitName,

@@ -24,6 +24,7 @@ import (
"net/http"

"cuelang.org/go/cue"
"cuelang.org/go/cue/cuecontext"
"cuelang.org/go/cue/format"
"cuelang.org/go/encoding/openapi"
"cuelang.org/go/encoding/yaml"
@@ -83,14 +84,16 @@ func GetChartValuesJSONSchema(ctx context.Context, h *common.Helm) ([]byte, erro

// generateSchemaFromValues generate OpenAPIv3 schema based on Chart Values
// file.
// nolint:staticcheck
func generateSchemaFromValues(values []byte) ([]byte, error) {
valuesIdentifier := "values"
r := cue.Runtime{}
cuectx := cuecontext.New()
// convert Values yaml to CUE
ins, err := yaml.Decode(&r, "", string(values))
file, err := yaml.Extract("", string(values))
if err != nil {
return nil, errors.Wrap(err, "cannot decode Values.yaml to CUE")
return nil, errors.Wrap(err, "cannot extract Values.yaml to CUE")
}
ins := cuectx.BuildFile(file)
// get the streamed CUE including the comments which will be used as
// 'description' in the schema
c, err := format.Node(ins.Value().Syntax(cue.Docs(true)), format.Simplify())
@@ -101,18 +104,15 @@ func generateSchemaFromValues(values []byte) ([]byte, error) {
// an identifier manually
valuesStr := fmt.Sprintf("#%s:{\n%s\n}", valuesIdentifier, string(c))

r = cue.Runtime{}
ins, err = r.Compile("-", valuesStr)
r := cue.Runtime{}
inst, err := r.Compile("-", valuesStr)
if err != nil {
return nil, errors.Wrap(err, "cannot compile CUE generated from Values.yaml")
}
if ins.Err != nil {
return nil, errors.Wrap(ins.Err, "cannot compile CUE generated from Values.yaml")
}
// generate OpenAPIv3 schema through cue openapi encoder
rawSchema, err := openapi.Gen(ins, &openapi.Config{})
rawSchema, err := openapi.Gen(inst, &openapi.Config{})
if err != nil {
return nil, errors.Wrap(ins.Err, "cannot generate OpenAPIv3 schema")
return nil, errors.Wrap(err, "cannot generate OpenAPIv3 schema")
}
rawSchema, err = makeSwaggerCompatible(rawSchema)
if err != nil {

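The Helm values path above follows the same migration: yaml.Decode on a cue.Runtime becomes yaml.Extract plus a cuecontext build. A minimal sketch, assuming cue v0.4.x and a made-up values file:

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
	"cuelang.org/go/cue/format"
	"cuelang.org/go/encoding/yaml"
)

func main() {
	values := []byte("# number of replicas\nreplicaCount: 1\n")
	file, err := yaml.Extract("values.yaml", values) // YAML -> *ast.File
	if err != nil {
		panic(err)
	}
	v := cuecontext.New().BuildFile(file)
	// Keep the YAML comments so they can later become 'description' fields in the schema.
	src, err := format.Node(v.Syntax(cue.Docs(true)), format.Simplify())
	if err != nil {
		panic(err)
	}
	fmt.Println(string(src))
}
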
pkg/appfile/helm/testdata/values.schema.json (vendored)
@@ -140,6 +140,16 @@
|
||||
"type": "boolean"
|
||||
},
|
||||
"hosts": {
|
||||
"default":[
|
||||
{
|
||||
"host":"chart-example.local",
|
||||
"paths":[
|
||||
{
|
||||
"path":"/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"description": "kubernetes.io/ingress.class: nginx\nkubernetes.io/tls-acme: \"true\"",
|
||||
"items": {
|
||||
"properties": {
|
||||
@@ -148,6 +158,11 @@
|
||||
"type": "string"
|
||||
},
|
||||
"paths": {
|
||||
"default":[
|
||||
{
|
||||
"path":"/"
|
||||
}
|
||||
],
|
||||
"items": {
|
||||
"properties": {
|
||||
"path": {
|
||||
|
||||
@@ -20,7 +20,7 @@ import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"github.com/crossplane/crossplane-runtime/pkg/test"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -121,17 +121,8 @@ spec:
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", temp.TemplateStr)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
instDest, err := r.Compile("-", cueTemplate)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
inst := cuecontext.New().CompileString(temp.TemplateStr)
|
||||
instDest := cuecontext.New().CompileString(cueTemplate)
|
||||
s1, _ := inst.Value().String()
|
||||
s2, _ := instDest.Value().String()
|
||||
if s1 != s2 {
|
||||
@@ -226,17 +217,8 @@ spec:
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", temp.TemplateStr)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
instDest, err := r.Compile("-", cueTemplate)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
inst := cuecontext.New().CompileString(temp.TemplateStr)
|
||||
instDest := cuecontext.New().CompileString(cueTemplate)
|
||||
s1, _ := inst.Value().String()
|
||||
s2, _ := instDest.Value().String()
|
||||
if s1 != s2 {
|
||||
@@ -345,17 +327,8 @@ spec:
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", temp.TemplateStr)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
instDest, err := r.Compile("-", cueTemplate)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
inst := cuecontext.New().CompileString(temp.TemplateStr)
|
||||
instDest := cuecontext.New().CompileString(cueTemplate)
|
||||
s1, _ := inst.Value().String()
|
||||
s2, _ := instDest.Value().String()
|
||||
if s1 != s2 {
|
||||
|
||||
@@ -28,6 +28,7 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/builtin/registry"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
)
|
||||
|
||||
func init() {
|
||||
@@ -55,8 +56,8 @@ func (c *HTTPCmd) Run(meta *registry.Meta) (res interface{}, err error) {
|
||||
Timeout: time.Second * 3,
|
||||
}
|
||||
)
|
||||
if obj := meta.Obj.Lookup("request"); obj.Exists() {
|
||||
if v := obj.Lookup("body"); v.Exists() {
|
||||
if obj := meta.Obj.LookupPath(value.FieldPath("request")); obj.Exists() {
|
||||
if v := obj.LookupPath(value.FieldPath("body")); v.Exists() {
|
||||
r, err = v.Reader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -84,13 +85,13 @@ func (c *HTTPCmd) Run(meta *registry.Meta) (res interface{}, err error) {
|
||||
req.Header = header
|
||||
req.Trailer = trailer
|
||||
|
||||
if tlsConfig := meta.Obj.Lookup("tls_config"); tlsConfig.Exists() {
|
||||
if tlsConfig := meta.Obj.LookupPath(value.FieldPath("tls_config")); tlsConfig.Exists() {
|
||||
tr := &http.Transport{
|
||||
TLSClientConfig: &tls.Config{
|
||||
NextProtos: []string{"http/1.1"},
|
||||
},
|
||||
}
|
||||
ca := tlsConfig.Lookup("ca")
|
||||
ca := tlsConfig.LookupPath(value.FieldPath("ca"))
|
||||
if caCrt, err := ca.String(); err != nil {
|
||||
return nil, errors.WithMessage(err, "parse ca")
|
||||
} else {
|
||||
@@ -99,8 +100,8 @@ func (c *HTTPCmd) Run(meta *registry.Meta) (res interface{}, err error) {
|
||||
tr.TLSClientConfig.RootCAs = pool
|
||||
}
|
||||
|
||||
cert := tlsConfig.Lookup("client_crt")
|
||||
key := tlsConfig.Lookup("client_key")
|
||||
cert := tlsConfig.LookupPath(value.FieldPath("client_crt"))
|
||||
key := tlsConfig.LookupPath(value.FieldPath("client_key"))
|
||||
if cert.Exists() && key.Exists() {
|
||||
crtData, err := cert.String()
|
||||
if err != nil {
|
||||
@@ -136,7 +137,7 @@ func (c *HTTPCmd) Run(meta *registry.Meta) (res interface{}, err error) {
|
||||
}
|
||||
|
||||
func parseHeaders(obj cue.Value, label string) (http.Header, error) {
|
||||
m := obj.Lookup(label)
|
||||
m := obj.LookupPath(value.FieldPath(label))
|
||||
if !m.Exists() {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
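The builtin http task above swaps the deprecated Value.Lookup for LookupPath; value.FieldPath is the repository helper that assembles the cue.Path. A rough equivalent using only the core API (the fields below are illustrative, not taken from this PR):

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	obj := cuecontext.New().CompileString(`tls_config: ca: "PEM DATA"`)
	// LookupPath replaces the deprecated Lookup(field ...) form.
	ca := obj.LookupPath(cue.ParsePath("tls_config.ca"))
	if !ca.Exists() {
		fmt.Println("no tls_config.ca set")
		return
	}
	pem, err := ca.String()
	if err != nil {
		panic(err)
	}
	fmt.Println(pem)
}
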
@@ -28,10 +28,12 @@ import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"github.com/bmizerany/assert"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/builtin/http/testdata"
|
||||
"github.com/oam-dev/kubevela/pkg/builtin/registry"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -70,11 +72,7 @@ func TestHTTPCmdRun(t *testing.T) {
|
||||
s := NewMock()
|
||||
defer s.Close()
|
||||
|
||||
r := cue.Runtime{}
|
||||
reqInst, err := r.Compile("", Req)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
reqInst := cuecontext.New().CompileString(Req)
|
||||
|
||||
runner, _ := newHTTPCmd(cue.Value{})
|
||||
got, err := runner.Run(®istry.Meta{Obj: reqInst.Value()})
|
||||
@@ -85,7 +83,7 @@ func TestHTTPCmdRun(t *testing.T) {
|
||||
|
||||
assert.Equal(t, "{\"token\":\"test-token\"}", body)
|
||||
|
||||
reqNoHeaderInst, err := r.Compile("", ReqWithoutHeader)
|
||||
reqNoHeaderInst := cuecontext.New().CompileString(ReqWithoutHeader)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@@ -103,15 +101,11 @@ func TestHTTPCmdRun(t *testing.T) {
|
||||
func TestHTTPSRun(t *testing.T) {
|
||||
s := newMockHttpsServer()
|
||||
defer s.Close()
|
||||
r := cue.Runtime{}
|
||||
reqInst, err := r.Compile("-", `method: "GET"
|
||||
reqInst := cuecontext.New().CompileString(`method: "GET"
|
||||
url: "https://127.0.0.1:8443/api/v1/token?val=test-token"`)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
reqInst, _ = reqInst.Fill(decodeCert(testdata.MockCerts.Ca), "tls_config", "ca")
|
||||
reqInst, _ = reqInst.Fill(decodeCert(testdata.MockCerts.ClientCrt), "tls_config", "client_crt")
|
||||
reqInst, _ = reqInst.Fill(decodeCert(testdata.MockCerts.ClientKey), "tls_config", "client_key")
|
||||
reqInst = reqInst.FillPath(value.FieldPath("tls_config", "ca"), decodeCert(testdata.MockCerts.Ca))
|
||||
reqInst = reqInst.FillPath(value.FieldPath("tls_config", "client_crt"), decodeCert(testdata.MockCerts.ClientCrt))
|
||||
reqInst = reqInst.FillPath(value.FieldPath("tls_config", "client_key"), decodeCert(testdata.MockCerts.ClientKey))
|
||||
|
||||
runner, _ := newHTTPCmd(cue.Value{})
|
||||
got, err := runner.Run(®istry.Meta{Obj: reqInst.Value()})
|
||||
|
||||
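The HTTPS test above likewise moves from Fill(value, path...) to FillPath with an explicit cue.Path. A hedged sketch of that call shape (field names invented for the example):

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	req := cuecontext.New().CompileString(`method: "GET"`)
	// FillPath returns a new value with the field set; it does not mutate req in place.
	req = req.FillPath(cue.ParsePath("tls_config.ca"), "PEM DATA")
	if err := req.Err(); err != nil {
		panic(err)
	}
	fmt.Println(req)
}
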
@@ -24,6 +24,8 @@ import (
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/errors"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
)
|
||||
|
||||
// Meta provides context for running a task.
|
||||
@@ -38,7 +40,7 @@ type Meta struct {
|
||||
|
||||
// Lookup fetches the value of context by filed
|
||||
func (m *Meta) Lookup(field string) cue.Value {
|
||||
f := m.Obj.Lookup(field)
|
||||
f := m.Obj.LookupPath(value.FieldPath(field))
|
||||
if !f.Exists() {
|
||||
m.Err = fmt.Errorf("invalid lookup argument")
|
||||
return cue.Value{}
|
||||
@@ -51,7 +53,7 @@ func (m *Meta) Lookup(field string) cue.Value {
|
||||
|
||||
// Int64 fetch the value formatted int64 of context by filed
|
||||
func (m *Meta) Int64(field string) int64 {
|
||||
f := m.Obj.Lookup(field)
|
||||
f := m.Obj.LookupPath(value.FieldPath(field))
|
||||
value, err := f.Int64()
|
||||
if err != nil {
|
||||
m.Err = fmt.Errorf("invalid int64 argument, %w", err)
|
||||
@@ -63,7 +65,7 @@ func (m *Meta) Int64(field string) int64 {
|
||||
|
||||
// String fetch the value formatted string of context by filed
|
||||
func (m *Meta) String(field string) string {
|
||||
f := m.Obj.Lookup(field)
|
||||
f := m.Obj.LookupPath(value.FieldPath(field))
|
||||
value, err := f.String()
|
||||
if err != nil {
|
||||
m.Err = fmt.Errorf("invalid string argument, %w", err)
|
||||
@@ -74,7 +76,7 @@ func (m *Meta) String(field string) string {
|
||||
|
||||
// Bytes fetch the value formatted bytes of context by filed
|
||||
func (m *Meta) Bytes(field string) []byte {
|
||||
f := m.Obj.Lookup(field)
|
||||
f := m.Obj.LookupPath(value.FieldPath(field))
|
||||
value, err := f.Bytes()
|
||||
if err != nil {
|
||||
m.Err = fmt.Errorf("invalid bytes argument, %w", err)
|
||||
|
||||
@@ -20,28 +20,19 @@ import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"github.com/bmizerany/assert"
|
||||
)
|
||||
|
||||
func TestContext(t *testing.T) {
|
||||
var r cue.Runtime
|
||||
|
||||
lpV := `test: "just a test"`
|
||||
inst, err := r.Compile("lp", lpV)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
ctx := Meta{Obj: inst.Value()}
|
||||
inst := cuecontext.New().CompileString(lpV)
|
||||
ctx := Meta{Obj: inst}
|
||||
val := ctx.Lookup("test")
|
||||
assert.Equal(t, true, val.Exists())
|
||||
|
||||
intV := `iTest: 64`
|
||||
iInst, err := r.Compile("int", intV)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
iInst := cuecontext.New().CompileString(intV)
|
||||
iCtx := Meta{Obj: iInst.Value()}
|
||||
iVal := iCtx.Int64("iTest")
|
||||
assert.Equal(t, int64(64), iVal)
|
||||
|
||||
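The test changes above all follow one pattern: cue.Runtime plus Compile, which returned an *cue.Instance and an error, becomes cuecontext.New().CompileString, which returns a cue.Value carrying any compile error itself. A minimal sketch, not tied to this test file:

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	v := cuecontext.New().CompileString(`iTest: 64`)
	// Compile errors are now reported via Value.Err instead of a second return value.
	if err := v.Err(); err != nil {
		panic(err)
	}
	n, _ := v.LookupPath(cue.ParsePath("iTest")).Int64()
	fmt.Println(n) // 64
}
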
@@ -20,9 +20,13 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"path/filepath"
|
||||
sysruntime "runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@@ -465,15 +469,8 @@ var _ = Describe("Test Application Controller", func() {
|
||||
importWdJson, _ := yaml.YAMLToJSON([]byte(wDImportYaml))
|
||||
|
||||
importTd := &v1alpha2.TraitDefinition{}
|
||||
importGateway := &v1alpha2.TraitDefinition{}
|
||||
importStorage := &v1alpha2.TraitDefinition{}
|
||||
|
||||
importEnv := &v1alpha2.TraitDefinition{}
|
||||
|
||||
importHubCpuScaler := &v1beta1.TraitDefinition{}
|
||||
|
||||
importPodAffinity := &v1beta1.TraitDefinition{}
|
||||
|
||||
webserverwd := &v1alpha2.ComponentDefinition{}
|
||||
webserverwdJson, _ := yaml.YAMLToJSON([]byte(webComponentDefYaml))
|
||||
|
||||
@@ -505,31 +502,18 @@ var _ = Describe("Test Application Controller", func() {
|
||||
Expect(json.Unmarshal(importTdJson, importTd)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, importTd.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
|
||||
gatewayJson, gatewayErr := yaml.YAMLToJSON([]byte(gatewayYaml))
|
||||
Expect(gatewayErr).ShouldNot(HaveOccurred())
|
||||
Expect(json.Unmarshal(gatewayJson, importGateway)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, importGateway.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
traitList := []string{"gateway", "storage", "env", "affinity"}
|
||||
for _, trait := range traitList {
|
||||
installDefinition(ctx, "../../../../charts/vela-core/templates/defwithtemplate", trait)
|
||||
}
|
||||
|
||||
storageJson, storageErr := yaml.YAMLToJSON([]byte(storageYaml))
|
||||
Expect(storageErr).ShouldNot(HaveOccurred())
|
||||
Expect(json.Unmarshal(storageJson, importStorage)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, importStorage.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
|
||||
envJson, envErr := yaml.YAMLToJSON([]byte(envYaml))
|
||||
Expect(envErr).ShouldNot(HaveOccurred())
|
||||
Expect(json.Unmarshal(envJson, importEnv)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, importEnv.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
installDefinition(ctx, "./application/testdata/definitions", "panic")
|
||||
|
||||
hubCpuScalerJson, hubCpuScalerErr := yaml.YAMLToJSON([]byte(hubCpuScalerYaml))
|
||||
Expect(hubCpuScalerErr).ShouldNot(HaveOccurred())
|
||||
Expect(json.Unmarshal(hubCpuScalerJson, importHubCpuScaler)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, importHubCpuScaler.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
|
||||
affinityJson, podAffinityErr := yaml.YAMLToJSON([]byte(affinityYaml))
|
||||
Expect(podAffinityErr).ShouldNot(HaveOccurred())
|
||||
Expect(json.Unmarshal(affinityJson, importPodAffinity)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, importPodAffinity.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
|
||||
Expect(json.Unmarshal(tDDefJson, td)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, td.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
|
||||
@@ -4711,8 +4695,52 @@ var _ = Describe("Test Application Controller", func() {
|
||||
|
||||
Expect(k8sClient.Delete(ctx, app)).Should(BeNil())
|
||||
})
|
||||
|
||||
It("test cue panic", func() {
|
||||
ns := &corev1.Namespace{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
Name: "cue-panic",
|
||||
},
|
||||
}
|
||||
Expect(k8sClient.Create(ctx, ns)).Should(BeNil())
|
||||
|
||||
appWithMountToEnvs.SetNamespace(ns.Name)
|
||||
app := appWithMountToEnvs.DeepCopy()
|
||||
app.Spec.Components[0].Traits = []common.ApplicationTrait{
|
||||
{
|
||||
Type: "panic",
|
||||
Properties: &runtime.RawExtension{Raw: []byte("{\"configMap\": [{\"name\": \"myweb-cm\"}]}")},
|
||||
},
|
||||
}
|
||||
Expect(k8sClient.Create(ctx, app)).Should(BeNil())
|
||||
|
||||
appKey := client.ObjectKey{
|
||||
Name: app.Name,
|
||||
Namespace: app.Namespace,
|
||||
}
|
||||
testutil.ReconcileOnceAfterFinalizer(reconciler, reconcile.Request{NamespacedName: appKey})
|
||||
|
||||
curApp := &v1beta1.Application{}
|
||||
Expect(k8sClient.Get(ctx, appKey, curApp)).Should(BeNil())
|
||||
Expect(curApp.Status.Phase).Should(Equal(common.ApplicationRunningWorkflow))
|
||||
Expect(curApp.Status.Workflow.Steps[0].Message).Should(ContainSubstring("invalid cue task for evaluation: runtime error: invalid memory address or nil pointer dereference"))
|
||||
})
|
||||
})
|
||||
|
||||
func installDefinition(ctx context.Context, defPath, name string) {
|
||||
_, file, _, _ := sysruntime.Caller(0)
|
||||
definitionPath := filepath.Join(filepath.Dir(filepath.Dir(file)), defPath)
|
||||
|
||||
b, err := ioutil.ReadFile(filepath.Join(definitionPath, name+".yaml"))
|
||||
Expect(err).ShouldNot(HaveOccurred())
|
||||
s := strings.ReplaceAll(string(b), `{{ include "systemDefinitionNamespace" . }}`, "vela-system")
|
||||
defJson, defErr := yaml.YAMLToJSON([]byte(s))
|
||||
Expect(defErr).ShouldNot(HaveOccurred())
|
||||
u := &unstructured.Unstructured{}
|
||||
Expect(json.Unmarshal(defJson, u)).Should(BeNil())
|
||||
Expect(k8sClient.Create(ctx, u.DeepCopy())).Should(SatisfyAny(BeNil(), &util.AlreadyExistMatcher{}))
|
||||
}
|
||||
|
||||
const (
|
||||
scopeDefYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
kind: ScopeDefinition
|
||||
@@ -4743,15 +4771,6 @@ spec:
|
||||
output: {
|
||||
apiVersion: "apps/v1"
|
||||
kind: "Deployment"
|
||||
metadata: {
|
||||
annotations: {
|
||||
if context["config"] != _|_ {
|
||||
for _, v in context.config {
|
||||
"\(v.name)" : v.value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
spec: {
|
||||
selector: matchLabels: {
|
||||
"app.oam.dev/component": context.name
|
||||
@@ -5135,106 +5154,7 @@ spec:
|
||||
cmd?: [...string]
|
||||
}
|
||||
`
|
||||
gatewayYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
kind: TraitDefinition
|
||||
metadata:
|
||||
annotations:
|
||||
definition.oam.dev/description: Enable public web traffic for the component, the ingress API matches K8s v1.20+.
|
||||
name: gateway
|
||||
namespace: vela-system
|
||||
spec:
|
||||
appliesToWorkloads:
|
||||
- '*'
|
||||
podDisruptive: false
|
||||
schematic:
|
||||
cue:
|
||||
template: |
|
||||
// trait template can have multiple outputs in one trait
|
||||
outputs: service: {
|
||||
apiVersion: "v1"
|
||||
kind: "Service"
|
||||
metadata: name: context.name
|
||||
spec: {
|
||||
selector: "app.oam.dev/component": context.name
|
||||
ports: [
|
||||
for k, v in parameter.http {
|
||||
port: v
|
||||
targetPort: v
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
outputs: ingress: {
|
||||
apiVersion: "networking.k8s.io/v1"
|
||||
kind: "Ingress"
|
||||
metadata: {
|
||||
name: context.name
|
||||
annotations: {
|
||||
if !parameter.classInSpec {
|
||||
"kubernetes.io/ingress.class": parameter.class
|
||||
}
|
||||
}
|
||||
}
|
||||
spec: {
|
||||
if parameter.classInSpec {
|
||||
ingressClassName: parameter.class
|
||||
}
|
||||
if parameter.secretName != _|_ {
|
||||
tls: [{
|
||||
hosts: [
|
||||
parameter.domain,
|
||||
]
|
||||
secretName: parameter.secretName
|
||||
}]
|
||||
}
|
||||
rules: [{
|
||||
host: parameter.domain
|
||||
http: paths: [
|
||||
for k, v in parameter.http {
|
||||
path: k
|
||||
pathType: "ImplementationSpecific"
|
||||
backend: service: {
|
||||
name: context.name
|
||||
port: number: v
|
||||
}
|
||||
},
|
||||
]
|
||||
}]
|
||||
}
|
||||
}
|
||||
parameter: {
|
||||
// +usage=Specify the domain you want to expose
|
||||
domain: string
|
||||
|
||||
// +usage=Specify the mapping relationship between the http path and the workload port
|
||||
http: [string]: int
|
||||
|
||||
// +usage=Specify the class of ingress to use
|
||||
class: *"nginx" | string
|
||||
|
||||
// +usage=Set ingress class in '.spec.ingressClassName' instead of 'kubernetes.io/ingress.class' annotation.
|
||||
classInSpec: *false | bool
|
||||
|
||||
// +usage=Specify the secret name you want to quote.
|
||||
secretName?: string
|
||||
}
|
||||
status:
|
||||
customStatus: |-
|
||||
let igs = context.outputs.ingress.status.loadBalancer.ingress
|
||||
if igs == _|_ {
|
||||
message: "No loadBalancer found, visiting by using 'vela port-forward " + context.appName + "'\n"
|
||||
}
|
||||
if len(igs) > 0 {
|
||||
if igs[0].ip != _|_ {
|
||||
message: "Visiting URL: " + context.outputs.ingress.spec.rules[0].host + ", IP: " + igs[0].ip
|
||||
}
|
||||
if igs[0].ip == _|_ {
|
||||
message: "Visiting URL: " + context.outputs.ingress.spec.rules[0].host
|
||||
}
|
||||
}
|
||||
healthPolicy: 'isHealth: len(context.outputs.service.spec.clusterIP) > 0'
|
||||
|
||||
`
|
||||
cdDefWithHealthStatusYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
kind: ComponentDefinition
|
||||
metadata:
|
||||
@@ -5758,424 +5678,6 @@ spec:
|
||||
}
|
||||
parameter: parallelism: int
|
||||
|
||||
`
|
||||
|
||||
storageYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
kind: TraitDefinition
|
||||
metadata:
|
||||
annotations:
|
||||
definition.oam.dev/description: Add storages on K8s pod for your workload which follows the pod spec in path 'spec.template'.
|
||||
name: storage
|
||||
namespace: vela-system
|
||||
spec:
|
||||
appliesToWorkloads:
|
||||
- deployments.apps
|
||||
podDisruptive: true
|
||||
schematic:
|
||||
cue:
|
||||
template: |
|
||||
pvcVolumesList: *[
|
||||
for v in parameter.pvc {
|
||||
{
|
||||
name: "pvc-" + v.name
|
||||
persistentVolumeClaim: claimName: v.name
|
||||
}
|
||||
},
|
||||
] | []
|
||||
configMapVolumesList: *[
|
||||
for v in parameter.configMap if v.mountPath != _|_ {
|
||||
{
|
||||
name: "configmap-" + v.name
|
||||
configMap: {
|
||||
defaultMode: v.defaultMode
|
||||
name: v.name
|
||||
if v.items != _|_ {
|
||||
items: v.items
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
secretVolumesList: *[
|
||||
for v in parameter.secret if v.mountPath != _|_ {
|
||||
{
|
||||
name: "secret-" + v.name
|
||||
secret: {
|
||||
defaultMode: v.defaultMode
|
||||
secretName: v.name
|
||||
if v.items != _|_ {
|
||||
items: v.items
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
emptyDirVolumesList: *[
|
||||
for v in parameter.emptyDir {
|
||||
{
|
||||
name: "emptydir-" + v.name
|
||||
emptyDir: medium: v.medium
|
||||
}
|
||||
},
|
||||
] | []
|
||||
pvcVolumeMountsList: *[
|
||||
for v in parameter.pvc {
|
||||
if v.volumeMode == "Filesystem" {
|
||||
{
|
||||
name: "pvc-" + v.name
|
||||
mountPath: v.mountPath
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
configMapVolumeMountsList: *[
|
||||
for v in parameter.configMap if v.mountPath != _|_ {
|
||||
{
|
||||
name: "configmap-" + v.name
|
||||
mountPath: v.mountPath
|
||||
}
|
||||
},
|
||||
] | []
|
||||
configMapEnvMountsList: *[
|
||||
for v in parameter.configMap if v.mountToEnv != _|_ {
|
||||
{
|
||||
name: v.mountToEnv.envName
|
||||
valueFrom: configMapKeyRef: {
|
||||
name: v.name
|
||||
key: v.mountToEnv.configMapKey
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
configMapMountToEnvsList: *[
|
||||
for v in parameter.configMap if v.mountToEnvs != _|_ for k in v.mountToEnvs {
|
||||
{
|
||||
name: k.envName
|
||||
valueFrom: configMapKeyRef: {
|
||||
name: v.name
|
||||
key: k.configMapKey
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
secretVolumeMountsList: *[
|
||||
for v in parameter.secret if v.mountPath != _|_ {
|
||||
{
|
||||
name: "secret-" + v.name
|
||||
mountPath: v.mountPath
|
||||
}
|
||||
},
|
||||
] | []
|
||||
secretEnvMountsList: *[
|
||||
for v in parameter.secret if v.mountToEnv != _|_ {
|
||||
{
|
||||
name: v.mountToEnv.envName
|
||||
valueFrom: secretKeyRef: {
|
||||
name: v.name
|
||||
key: v.mountToEnv.secretKey
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
secretMountToEnvsList: *[
|
||||
for v in parameter.secret if v.mountToEnvs != _|_ for k in v.mountToEnvs {
|
||||
{
|
||||
name: k.envName
|
||||
valueFrom: secretKeyRef: {
|
||||
name: v.name
|
||||
key: k.secretKey
|
||||
}
|
||||
}
|
||||
},
|
||||
] | []
|
||||
emptyDirVolumeMountsList: *[
|
||||
for v in parameter.emptyDir {
|
||||
{
|
||||
name: "emptydir-" + v.name
|
||||
mountPath: v.mountPath
|
||||
}
|
||||
},
|
||||
] | []
|
||||
volumeDevicesList: *[
|
||||
for v in parameter.pvc if v.volumeMode == "Block" {
|
||||
{
|
||||
name: "pvc-" + v.name
|
||||
devicePath: v.mountPath
|
||||
}
|
||||
},
|
||||
] | []
|
||||
patch: spec: template: spec: {
|
||||
// +patchKey=name
|
||||
volumes: pvcVolumesList + configMapVolumesList + secretVolumesList + emptyDirVolumesList
|
||||
|
||||
containers: [{
|
||||
// +patchKey=name
|
||||
env: configMapEnvMountsList + secretEnvMountsList + configMapMountToEnvsList + secretMountToEnvsList
|
||||
// +patchKey=name
|
||||
volumeDevices: volumeDevicesList
|
||||
// +patchKey=name
|
||||
volumeMounts: pvcVolumeMountsList + configMapVolumeMountsList + secretVolumeMountsList + emptyDirVolumeMountsList
|
||||
},...]
|
||||
|
||||
}
|
||||
outputs: {
|
||||
for v in parameter.pvc {
|
||||
if v.mountOnly == false {
|
||||
"pvc-\(v.name)": {
|
||||
apiVersion: "v1"
|
||||
kind: "PersistentVolumeClaim"
|
||||
metadata: name: v.name
|
||||
spec: {
|
||||
accessModes: v.accessModes
|
||||
volumeMode: v.volumeMode
|
||||
if v.volumeName != _|_ {
|
||||
volumeName: v.volumeName
|
||||
}
|
||||
if v.storageClassName != _|_ {
|
||||
storageClassName: v.storageClassName
|
||||
}
|
||||
|
||||
if v.resources.requests.storage == _|_ {
|
||||
resources: requests: storage: "8Gi"
|
||||
}
|
||||
if v.resources.requests.storage != _|_ {
|
||||
resources: requests: storage: v.resources.requests.storage
|
||||
}
|
||||
if v.resources.limits.storage != _|_ {
|
||||
resources: limits: storage: v.resources.limits.storage
|
||||
}
|
||||
if v.dataSourceRef != _|_ {
|
||||
dataSourceRef: v.dataSourceRef
|
||||
}
|
||||
if v.dataSource != _|_ {
|
||||
dataSource: v.dataSource
|
||||
}
|
||||
if v.selector != _|_ {
|
||||
dataSource: v.selector
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for v in parameter.configMap {
|
||||
if v.mountOnly == false {
|
||||
"configmap-\(v.name)": {
|
||||
apiVersion: "v1"
|
||||
kind: "ConfigMap"
|
||||
metadata: name: v.name
|
||||
if v.data != _|_ {
|
||||
data: v.data
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for v in parameter.secret {
|
||||
if v.mountOnly == false {
|
||||
"secret-\(v.name)": {
|
||||
apiVersion: "v1"
|
||||
kind: "Secret"
|
||||
metadata: name: v.name
|
||||
if v.data != _|_ {
|
||||
data: v.data
|
||||
}
|
||||
if v.stringData != _|_ {
|
||||
stringData: v.stringData
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
parameter: {
|
||||
// +usage=Declare pvc type storage
|
||||
pvc?: [...{
|
||||
name: string
|
||||
mountOnly: *false | bool
|
||||
mountPath: string
|
||||
volumeMode: *"Filesystem" | string
|
||||
volumeName?: string
|
||||
accessModes: *["ReadWriteOnce"] | [...string]
|
||||
storageClassName?: string
|
||||
resources?: {
|
||||
requests: storage: =~"^([1-9][0-9]{0,63})(E|P|T|G|M|K|Ei|Pi|Ti|Gi|Mi|Ki)$"
|
||||
limits?: storage: =~"^([1-9][0-9]{0,63})(E|P|T|G|M|K|Ei|Pi|Ti|Gi|Mi|Ki)$"
|
||||
}
|
||||
dataSourceRef?: {
|
||||
name: string
|
||||
kind: string
|
||||
apiGroup: string
|
||||
}
|
||||
dataSource?: {
|
||||
name: string
|
||||
kind: string
|
||||
apiGroup: string
|
||||
}
|
||||
selector?: {
|
||||
matchLabels?: [string]: string
|
||||
matchExpressions?: {
|
||||
key: string
|
||||
values: [...string]
|
||||
operator: string
|
||||
}
|
||||
}
|
||||
}]
|
||||
|
||||
// +usage=Declare config map type storage
|
||||
configMap?: [...{
|
||||
name: string
|
||||
mountOnly: *false | bool
|
||||
mountToEnv?: {
|
||||
envName: string
|
||||
configMapKey: string
|
||||
}
|
||||
mountToEnvs?: [...{
|
||||
envName: string
|
||||
configMapKey: string
|
||||
}]
|
||||
mountPath?: string
|
||||
defaultMode: *420 | int
|
||||
readOnly: *false | bool
|
||||
data?: {...}
|
||||
items?: [...{
|
||||
key: string
|
||||
path: string
|
||||
mode: *511 | int
|
||||
}]
|
||||
}]
|
||||
|
||||
// +usage=Declare secret type storage
|
||||
secret?: [...{
|
||||
name: string
|
||||
mountOnly: *false | bool
|
||||
mountToEnv?: {
|
||||
envName: string
|
||||
secretKey: string
|
||||
}
|
||||
mountToEnvs?: [...{
|
||||
envName: string
|
||||
secretKey: string
|
||||
}]
|
||||
mountPath?: string
|
||||
defaultMode: *420 | int
|
||||
readOnly: *false | bool
|
||||
stringData?: {...}
|
||||
data?: {...}
|
||||
items?: [...{
|
||||
key: string
|
||||
path: string
|
||||
mode: *511 | int
|
||||
}]
|
||||
}]
|
||||
|
||||
// +usage=Declare empty dir type storage
|
||||
emptyDir?: [...{
|
||||
name: string
|
||||
mountPath: string
|
||||
medium: *"" | "Memory"
|
||||
}]
|
||||
}
|
||||
`
|
||||
|
||||
envYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
kind: TraitDefinition
|
||||
metadata:
|
||||
annotations:
|
||||
definition.oam.dev/description: Add env on K8s pod for your workload which follows the pod spec in path 'spec.template'
|
||||
labels:
|
||||
custom.definition.oam.dev/ui-hidden: "true"
|
||||
name: env
|
||||
namespace: vela-system
|
||||
spec:
|
||||
appliesToWorkloads:
|
||||
- '*'
|
||||
schematic:
|
||||
cue:
|
||||
template: |
|
||||
#PatchParams: {
|
||||
// +usage=Specify the name of the target container, if not set, use the component name
|
||||
containerName: *"" | string
|
||||
// +usage=Specify if replacing the whole environment settings for the container
|
||||
replace: *false | bool
|
||||
// +usage=Specify the environment variables to merge, if key already existing, override its value
|
||||
env: [string]: string
|
||||
// +usage=Specify which existing environment variables to unset
|
||||
unset: *[] | [...string]
|
||||
}
|
||||
PatchContainer: {
|
||||
_params: #PatchParams
|
||||
name: _params.containerName
|
||||
_delKeys: {for k in _params.unset {"\(k)": ""}}
|
||||
_baseContainers: context.output.spec.template.spec.containers
|
||||
_matchContainers_: [ for _container_ in _baseContainers if _container_.name == name {_container_}]
|
||||
_baseContainer: *_|_ | {...}
|
||||
if len(_matchContainers_) == 0 {
|
||||
err: "container \(name) not found"
|
||||
}
|
||||
if len(_matchContainers_) > 0 {
|
||||
_baseContainer: _matchContainers_[0]
|
||||
_baseEnv: _baseContainer.env
|
||||
if _baseEnv == _|_ {
|
||||
// +patchStrategy=replace
|
||||
env: [ for k, v in _params.env if _delKeys[k] == _|_ {
|
||||
name: k
|
||||
value: v
|
||||
}]
|
||||
}
|
||||
if _baseEnv != _|_ {
|
||||
_baseEnvMap: {for envVar in _baseEnv {"\(envVar.name)": envVar.value}}
|
||||
// +patchStrategy=replace
|
||||
env: [ for envVar in _baseEnv if _delKeys[envVar.name] == _|_ && !_params.replace {
|
||||
name: envVar.name
|
||||
if _params.env[envVar.name] != _|_ {
|
||||
value: _params.env[envVar.name]
|
||||
}
|
||||
if _params.env[envVar.name] == _|_ {
|
||||
value: envVar.value
|
||||
}
|
||||
}] + [ for k, v in _params.env if _delKeys[k] == _|_ && (_params.replace || _baseEnvMap[k] == _|_) {
|
||||
name: k
|
||||
value: v
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
patch: spec: template: spec: {
|
||||
if parameter.containers == _|_ {
|
||||
// +patchKey=name
|
||||
containers: [{
|
||||
PatchContainer & {_params: {
|
||||
if parameter.containerName == "" {
|
||||
containerName: context.name
|
||||
}
|
||||
if parameter.containerName != "" {
|
||||
containerName: parameter.containerName
|
||||
}
|
||||
replace: parameter.replace
|
||||
env: parameter.env
|
||||
unset: parameter.unset
|
||||
}}
|
||||
}]
|
||||
}
|
||||
if parameter.containers != _|_ {
|
||||
// +patchKey=name
|
||||
containers: [ for c in parameter.containers {
|
||||
if c.containerName == "" {
|
||||
err: "containerName must be set for containers"
|
||||
}
|
||||
if c.containerName != "" {
|
||||
PatchContainer & {_params: c}
|
||||
}
|
||||
}]
|
||||
}
|
||||
}
|
||||
parameter: *#PatchParams | close({
|
||||
// +usage=Specify the environment variables for multiple containers
|
||||
containers: [...#PatchParams]
|
||||
})
|
||||
errs: [ for c in patch.spec.template.spec.containers if c.err != _|_ {c.err}]
|
||||
|
||||
`
|
||||
|
||||
hubCpuScalerYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
@@ -6221,190 +5723,6 @@ spec:
|
||||
// +usage=Specify the kind of scale target
|
||||
targetKind: *"Deployment" | string
|
||||
}
|
||||
`
|
||||
affinityYaml = `apiVersion: core.oam.dev/v1beta1
|
||||
kind: TraitDefinition
|
||||
metadata:
|
||||
annotations:
|
||||
definition.oam.dev/description: affinity specify affinity and tolerationon K8s pod for your workload which follows the pod spec in path 'spec.template'.
|
||||
labels:
|
||||
custom.definition.oam.dev/ui-hidden: "true"
|
||||
name: affinity
|
||||
namespace: vela-system
|
||||
spec:
|
||||
appliesToWorkloads:
|
||||
- '*'
|
||||
podDisruptive: true
|
||||
schematic:
|
||||
cue:
|
||||
template: |
|
||||
patch: spec: template: spec: {
|
||||
if parameter.podAffinity != _|_ {
|
||||
affinity: podAffinity: {
|
||||
if parameter.podAffinity.required != _|_ {
|
||||
requiredDuringSchedulingIgnoredDuringExecution: [
|
||||
for k in parameter.podAffinity.required {
|
||||
if k.labelSelector != _|_ {
|
||||
labelSelector: k.labelSelector
|
||||
}
|
||||
if k.namespace != _|_ {
|
||||
namespace: k.namespace
|
||||
}
|
||||
topologyKey: k.topologyKey
|
||||
if k.namespaceSelector != _|_ {
|
||||
namespaceSelector: k.namespaceSelector
|
||||
}
|
||||
}]
|
||||
}
|
||||
if parameter.podAffinity.preferred != _|_ {
|
||||
preferredDuringSchedulingIgnoredDuringExecution: [
|
||||
for k in parameter.podAffinity.preferred {
|
||||
weight: k.weight
|
||||
podAffinityTerm: k.podAffinityTerm
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
if parameter.podAntiAffinity != _|_ {
|
||||
affinity: podAntiAffinity: {
|
||||
if parameter.podAntiAffinity.required != _|_ {
|
||||
requiredDuringSchedulingIgnoredDuringExecution: [
|
||||
for k in parameter.podAntiAffinity.required {
|
||||
if k.labelSelector != _|_ {
|
||||
labelSelector: k.labelSelector
|
||||
}
|
||||
if k.namespace != _|_ {
|
||||
namespace: k.namespace
|
||||
}
|
||||
topologyKey: k.topologyKey
|
||||
if k.namespaceSelector != _|_ {
|
||||
namespaceSelector: k.namespaceSelector
|
||||
}
|
||||
}]
|
||||
}
|
||||
if parameter.podAntiAffinity.preferred != _|_ {
|
||||
preferredDuringSchedulingIgnoredDuringExecution: [
|
||||
for k in parameter.podAntiAffinity.preferred {
|
||||
weight: k.weight
|
||||
podAffinityTerm: k.podAffinityTerm
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
if parameter.nodeAffinity != _|_ {
|
||||
affinity: nodeAffinity: {
|
||||
if parameter.nodeAffinity.required != _|_ {
|
||||
requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: [
|
||||
for k in parameter.nodeAffinity.required.nodeSelectorTerms {
|
||||
if k.matchExpressions != _|_ {
|
||||
matchExpressions: k.matchExpressions
|
||||
}
|
||||
if k.matchFields != _|_ {
|
||||
matchFields: k.matchFields
|
||||
}
|
||||
}]
|
||||
}
|
||||
if parameter.nodeAffinity.preferred != _|_ {
|
||||
preferredDuringSchedulingIgnoredDuringExecution: [
|
||||
for k in parameter.nodeAffinity.preferred {
|
||||
weight: k.weight
|
||||
preference: k.preference
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
if parameter.tolerations != _|_ {
|
||||
tolerations: [
|
||||
for k in parameter.tolerations {
|
||||
if k.key != _|_ {
|
||||
key: k.key
|
||||
}
|
||||
if k.effect != _|_ {
|
||||
effect: k.effect
|
||||
}
|
||||
if k.value != _|_ {
|
||||
value: k.value
|
||||
}
|
||||
operator: k.operator
|
||||
if k.tolerationSeconds != _|_ {
|
||||
tolerationSeconds: k.tolerationSeconds
|
||||
}
|
||||
}]
|
||||
}
|
||||
}
|
||||
#labelSelector: {
|
||||
matchLabels?: [string]: string
|
||||
matchExpressions?: [...{
|
||||
key: string
|
||||
operator: *"In" | "NotIn" | "Exists" | "DoesNotExist"
|
||||
values?: [...string]
|
||||
}]
|
||||
}
|
||||
#podAffinityTerm: {
|
||||
labelSelector?: #labelSelector
|
||||
namespaces?: [...string]
|
||||
topologyKey: string
|
||||
namespaceSelector?: #labelSelector
|
||||
}
|
||||
#nodeSelecor: {
|
||||
key: string
|
||||
operator: *"In" | "NotIn" | "Exists" | "DoesNotExist" | "Gt" | "Lt"
|
||||
values?: [...string]
|
||||
}
|
||||
#nodeSelectorTerm: {
|
||||
matchExpressions?: [...#nodeSelecor]
|
||||
matchFields?: [...#nodeSelecor]
|
||||
}
|
||||
parameter: {
|
||||
// +usage=Specify the pod affinity scheduling rules
|
||||
podAffinity?: {
|
||||
// +usage=Specify the required during scheduling ignored during execution
|
||||
required?: [...#podAffinityTerm]
|
||||
// +usage=Specify the preferred during scheduling ignored during execution
|
||||
preferred?: [...{
|
||||
// +usage=Specify weight associated with matching the corresponding podAffinityTerm
|
||||
weight: int & >=1 & <=100
|
||||
// +usage=Specify a set of pods
|
||||
podAffinityTerm: #podAffinityTerm
|
||||
}]
|
||||
}
|
||||
// +usage=Specify the pod anti-affinity scheduling rules
|
||||
podAntiAffinity?: {
|
||||
// +usage=Specify the required during scheduling ignored during execution
|
||||
required?: [...#podAffinityTerm]
|
||||
// +usage=Specify the preferred during scheduling ignored during execution
|
||||
preferred?: [...{
|
||||
// +usage=Specify weight associated with matching the corresponding podAffinityTerm
|
||||
weight: int & >=1 & <=100
|
||||
// +usage=Specify a set of pods
|
||||
podAffinityTerm: #podAffinityTerm
|
||||
}]
|
||||
}
|
||||
// +usage=Specify the node affinity scheduling rules for the pod
|
||||
nodeAffinity?: {
|
||||
// +usage=Specify the required during scheduling ignored during execution
|
||||
required?: {
|
||||
// +usage=Specify a list of node selector
|
||||
nodeSelectorTerms: [...#nodeSelectorTerm]
|
||||
}
|
||||
// +usage=Specify the preferred during scheduling ignored during execution
|
||||
preferred?: [...{
|
||||
// +usage=Specify weight associated with matching the corresponding nodeSelector
|
||||
weight: int & >=1 & <=100
|
||||
// +usage=Specify a node selector
|
||||
preference: #nodeSelectorTerm
|
||||
}]
|
||||
}
|
||||
// +usage=Specify tolerant taint
|
||||
tolerations?: [...{
|
||||
key?: string
|
||||
operator: *"Equal" | "Exists"
|
||||
value?: string
|
||||
effect?: "NoSchedule" | "PreferNoSchedule" | "NoExecute"
|
||||
// +usage=Specify the period of time the toleration
|
||||
tolerationSeconds?: int
|
||||
}]
|
||||
}
|
||||
`
|
||||
)
|
||||
|
||||
|
||||
@@ -183,7 +183,7 @@ func convertStepProperties(step *v1beta1.WorkflowStep, app *v1beta1.Application)
step.Inputs = append(step.Inputs, c.Inputs...)
for index := range step.Inputs {
parameterKey := strings.TrimSpace(step.Inputs[index].ParameterKey)
if !strings.HasPrefix(parameterKey, "properties") && !strings.HasPrefix(parameterKey, "traits[") {
if parameterKey != "" && !strings.HasPrefix(parameterKey, "properties") && !strings.HasPrefix(parameterKey, "traits[") {
parameterKey = "properties." + parameterKey
}
step.Inputs[index].ParameterKey = parameterKey

pkg/controller/core.oam.dev/v1alpha2/application/testdata/definitions/panic.yaml (new file, vendored)
@@ -0,0 +1,56 @@
|
||||
apiVersion: core.oam.dev/v1beta1
|
||||
kind: TraitDefinition
|
||||
metadata:
|
||||
annotations: {}
|
||||
name: panic
|
||||
namespace: vela-system
|
||||
spec:
|
||||
schematic:
|
||||
cue:
|
||||
template: |
|
||||
pvcVolumesList: *[
|
||||
for v in parameter.pvc if v.mountPath != _|_ {
|
||||
{
|
||||
name: "pvc-" + v.name
|
||||
persistentVolumeClaim: claimName: v.name
|
||||
}
|
||||
},
|
||||
] | []
|
||||
configMapVolumesList: *[
|
||||
for v in parameter.configMap if v.mountPath != _|_ {
|
||||
{
|
||||
name: "configmap-" + v.name
|
||||
configMap: name: v.name
|
||||
}
|
||||
},
|
||||
] | []
|
||||
volumesList: pvcVolumesList + configMapVolumesList
|
||||
deDupVolumesArray: [
|
||||
for val in [
|
||||
for i, vi in volumesList {
|
||||
for j, vj in volumesList if j < i && vi.name == vj.name {
|
||||
ignore: true
|
||||
}
|
||||
vi
|
||||
},
|
||||
] if val.ignore == _|_ {
|
||||
val
|
||||
},
|
||||
]
|
||||
patch: spec: template: spec: {
|
||||
// +patchKey=name
|
||||
volumes: deDupVolumesArray
|
||||
}
|
||||
parameter: {
|
||||
// +usage=Declare pvc type storage
|
||||
pvc?: [...{
|
||||
name: string
|
||||
mountPath?: string
|
||||
}]
|
||||
|
||||
// +usage=Declare config map type storage
|
||||
configMap?: [...{
|
||||
name: string
|
||||
mountPath?: string
|
||||
}]
|
||||
}
|
||||
@@ -45,6 +45,7 @@ import (
|
||||
"github.com/oam-dev/kubevela/pkg/appfile/helm"
|
||||
velacue "github.com/oam-dev/kubevela/pkg/cue"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/packages"
|
||||
"github.com/oam-dev/kubevela/pkg/oam/util"
|
||||
"github.com/oam-dev/kubevela/pkg/utils/common"
|
||||
@@ -713,6 +714,7 @@ func getOpenAPISchema(capability types.Capability, pd *packages.PackageDiscover)
|
||||
}
|
||||
|
||||
// generateOpenAPISchemaFromCapabilityParameter returns the parameter of a definition in cue.Value format
|
||||
// nolint:staticcheck
|
||||
func generateOpenAPISchemaFromCapabilityParameter(capability types.Capability, pd *packages.PackageDiscover) ([]byte, error) {
|
||||
template, err := PrepareParameterCue(capability.Name, capability.CueTemplate)
|
||||
if err != nil {
|
||||
@@ -735,7 +737,7 @@ func generateOpenAPISchemaFromCapabilityParameter(capability types.Capability, p
|
||||
return common.GenOpenAPI(cueInst)
|
||||
}
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
err = bi.AddFile("-", template)
|
||||
err = value.AddFile(bi, "-", template)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ annotations: {
|
||||
|
||||
parameter: [string]: string
|
||||
`,
|
||||
want: want{data: "{\"additionalProperties\":{\"type\":\"string\"},\"type\":\"object\"}", err: nil},
|
||||
want: want{data: "{\"type\":\"object\"}", err: nil},
|
||||
},
|
||||
"parameter in cue is a string type,": {
|
||||
reason: "Prepare a normal parameter cue file",
|
||||
@@ -170,7 +170,7 @@ patch: {
|
||||
|
||||
parameter: [string]: string
|
||||
`,
|
||||
want: want{data: "{\"additionalProperties\":{\"type\":\"string\"},\"type\":\"object\"}", err: nil},
|
||||
want: want{data: "{\"type\":\"object\"}", err: nil},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -22,10 +22,10 @@ import (
"strings"

"cuelang.org/go/cue"
"cuelang.org/go/cue/build"

"github.com/oam-dev/kubevela/apis/types"
"github.com/oam-dev/kubevela/pkg/cue/model"
"github.com/oam-dev/kubevela/pkg/cue/model/value"
"github.com/oam-dev/kubevela/pkg/cue/packages"
)

@@ -34,27 +34,11 @@ var ErrParameterNotExist = errors.New("parameter not exist")

// GetParameters get parameter from cue template
func GetParameters(templateStr string, pd *packages.PackageDiscover) ([]types.Parameter, error) {
var template *cue.Instance
var err error
if pd != nil {
bi := build.NewContext().NewInstance("", nil)
err := bi.AddFile("-", templateStr+BaseTemplate)
if err != nil {
return nil, err
}

template, err = pd.ImportPackagesAndBuildInstance(bi)
if err != nil {
return nil, err
}
} else {
r := cue.Runtime{}
template, err = r.Compile("", templateStr+BaseTemplate)
if err != nil {
return nil, err
}
template, err := value.NewValue(templateStr+BaseTemplate, pd, "")
if err != nil {
return nil, err
}
tempStruct, err := template.Value().Struct()
tempStruct, err := template.CueValue().Struct()
if err != nil {
return nil, err
}

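GetParameters above still reads fields through the deprecated Struct() accessor (hence the nolint markers elsewhere in this change); the non-deprecated route in cue v0.4 is Fields(). A hedged sketch of walking a parameter struct that way (the parameter shape is invented for the example):

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	v := cuecontext.New().CompileString(`parameter: {
	// +usage=Which image would you like to use
	image: string
	port?: *8080 | int
}`)
	param := v.LookupPath(cue.ParsePath("parameter"))
	iter, err := param.Fields(cue.Optional(true)) // include optional fields such as port?
	if err != nil {
		panic(err)
	}
	for iter.Next() {
		fmt.Println(iter.Selector(), iter.Value().IncompleteKind())
	}
}
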
@@ -42,10 +42,10 @@ func TestGetParameter(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []types.Parameter{
|
||||
{Name: "name", Required: true, Default: "", Type: cue.StringKind},
|
||||
{Name: "env", Required: false, Default: nil, Type: cue.ListKind},
|
||||
{Name: "image", Short: "i", Required: true, Usage: "Which image would you like to use for your service", Default: "", Type: cue.StringKind},
|
||||
{Name: "port", Short: "p", Required: false, Usage: "Which port do you want customer traffic sent to", Default: int64(8080),
|
||||
Type: cue.IntKind},
|
||||
{Name: "env", Required: false, Default: nil, Type: cue.ListKind},
|
||||
{Name: "cpu", Short: "", Required: false, Usage: "", Default: "", Type: cue.StringKind}},
|
||||
params)
|
||||
|
||||
@@ -54,9 +54,9 @@ func TestGetParameter(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []types.Parameter{
|
||||
{Name: "name", Required: true, Default: "", Type: cue.StringKind},
|
||||
{Name: "env", Required: false, Default: nil, Type: cue.ListKind},
|
||||
{Name: "image", Short: "i", Required: true, Usage: "Which image would you like to use for your service", Default: "", Type: cue.StringKind},
|
||||
{Name: "port", Short: "p", Usage: "Which port do you want customer traffic sent to", Default: int64(8080), Type: cue.IntKind},
|
||||
{Name: "env", Required: false, Default: nil, Type: cue.ListKind},
|
||||
{Name: "enable", Default: false, Type: cue.BoolKind},
|
||||
{Name: "fval", Default: 64.3, Type: cue.FloatKind},
|
||||
{Name: "nval", Default: float64(0), Required: true, Type: cue.NumberKind}}, params)
|
||||
|
||||
@@ -23,12 +23,15 @@ import (
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
|
||||
"sigs.k8s.io/controller-runtime/pkg/client"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/sets"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/packages"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/process"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/task"
|
||||
@@ -88,7 +91,7 @@ func NewWorkloadAbstractEngine(name string, pd *packages.PackageDiscover) Abstra
|
||||
// Complete do workload definition's rendering
|
||||
func (wd *workloadDef) Complete(ctx process.Context, abstractTemplate string, params interface{}) error {
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
if err := bi.AddFile("-", abstractTemplate); err != nil {
|
||||
if err := value.AddFile(bi, "-", renderTemplate(abstractTemplate)); err != nil {
|
||||
return errors.WithMessagef(err, "invalid cue template of workload %s", wd.name)
|
||||
}
|
||||
var paramFile = model.ParameterFieldName + ": {}"
|
||||
@@ -101,7 +104,7 @@ func (wd *workloadDef) Complete(ctx process.Context, abstractTemplate string, pa
|
||||
paramFile = fmt.Sprintf("%s: %s", model.ParameterFieldName, string(bt))
|
||||
}
|
||||
}
|
||||
if err := bi.AddFile(model.ParameterFieldName, paramFile); err != nil {
|
||||
if err := value.AddFile(bi, model.ParameterFieldName, paramFile); err != nil {
|
||||
return errors.WithMessagef(err, "invalid parameter of workload %s", wd.name)
|
||||
}
|
||||
|
||||
@@ -109,19 +112,19 @@ func (wd *workloadDef) Complete(ctx process.Context, abstractTemplate string, pa
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := bi.AddFile("-", c); err != nil {
|
||||
if err := value.AddFile(bi, "context", c); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
inst, err := wd.pd.ImportPackagesAndBuildInstance(bi)
|
||||
val, err := wd.pd.ImportPackagesAndBuildValue(bi)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := inst.Value().Validate(); err != nil {
|
||||
if err := val.Validate(); err != nil {
|
||||
return errors.WithMessagef(err, "invalid cue template of workload %s after merge parameter and context", wd.name)
|
||||
}
|
||||
output := inst.Lookup(OutputFieldName)
|
||||
output := val.LookupPath(value.FieldPath(OutputFieldName))
|
||||
base, err := model.NewBase(output)
|
||||
if err != nil {
|
||||
return errors.WithMessagef(err, "invalid output of workload %s", wd.name)
|
||||
@@ -131,7 +134,7 @@ func (wd *workloadDef) Complete(ctx process.Context, abstractTemplate string, pa
|
||||
}
|
||||
|
||||
// we will support outputs for workload composition, and it will become trait in AppConfig.
|
||||
outputs := inst.Lookup(OutputsFieldName)
|
||||
outputs := val.LookupPath(value.FieldPath(OutputsFieldName))
|
||||
if !outputs.Exists() {
|
||||
return nil
|
||||
}
|
||||
@@ -219,12 +222,8 @@ func checkHealth(templateContext map[string]interface{}, healthPolicyTemplate st
}

var buff = "context: " + string(bt) + "\n" + healthPolicyTemplate
var r cue.Runtime
inst, err := r.Compile("-", buff)
if err != nil {
return false, errors.WithMessage(err, "compile health template")
}
healthy, err := inst.Lookup(HealthCheckPolicy).Bool()
val := cuecontext.New().CompileString(buff)
healthy, err := val.LookupPath(value.FieldPath(HealthCheckPolicy)).Bool()
if err != nil {
return false, errors.WithMessage(err, "evaluate health status")
}
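
For readers following the migration: this hunk replaces the deprecated cue.Runtime/Instance API with cuecontext and direct cue.Value lookups. Below is a minimal, self-contained sketch of the new compile-and-lookup flow; the isHealth policy document is illustrative and not taken from this hunk.

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	// Illustrative health policy; the real template comes from the trait definition.
	buff := `context: status: "Running"
isHealth: context.status == "Running"`

	// A *cue.Context replaces the removed cue.Runtime.
	val := cuecontext.New().CompileString(buff)

	// LookupPath + Bool replaces the old Instance.Lookup(...).Bool().
	healthy, err := val.LookupPath(cue.ParsePath("isHealth")).Bool()
	if err != nil {
		panic(err)
	}
	fmt.Println(healthy) // true
}

The same CompileString + LookupPath pair is what checkHealth now relies on instead of Runtime.Compile and Instance.Lookup.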
@@ -244,15 +243,13 @@ func getStatusMessage(pd *packages.PackageDiscover, templateContext map[string]i
|
||||
if customStatusTemplate == "" {
|
||||
return "", nil
|
||||
}
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
var ctxBuff string
|
||||
var paramBuff = "parameter: {}\n"
|
||||
|
||||
bt, err := json.Marshal(templateContext)
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "json marshal template context")
|
||||
}
|
||||
ctxBuff = "context: " + string(bt) + "\n"
|
||||
ctxBuff := "context: " + string(bt) + "\n"
|
||||
|
||||
bt, err = json.Marshal(parameter)
|
||||
if err != nil {
|
||||
@@ -262,15 +259,12 @@ func getStatusMessage(pd *packages.PackageDiscover, templateContext map[string]i
|
||||
paramBuff = "parameter: " + string(bt) + "\n"
|
||||
}
|
||||
var buff = customStatusTemplate + "\n" + ctxBuff + paramBuff
|
||||
if err := bi.AddFile("-", buff); err != nil {
|
||||
return "", errors.WithMessagef(err, "invalid cue template of customStatus")
|
||||
}
|
||||
|
||||
inst, err := pd.ImportPackagesAndBuildInstance(bi)
|
||||
val, err := value.NewValue(buff, pd, "")
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "compile customStatus template")
|
||||
return "", errors.WithMessage(err, "compile status template")
|
||||
}
|
||||
message, err := inst.Lookup(CustomMessage).String()
|
||||
message, err := val.CueValue().LookupPath(value.FieldPath(CustomMessage)).String()
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "evaluate customStatus.message")
|
||||
}
|
||||
@@ -292,47 +286,43 @@ func NewTraitAbstractEngine(name string, pd *packages.PackageDiscover) AbstractE
|
||||
}
|
||||
|
||||
// Complete do trait definition's rendering
|
||||
// nolint:gocyclo
|
||||
func (td *traitDef) Complete(ctx process.Context, abstractTemplate string, params interface{}) error {
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
if err := bi.AddFile("-", abstractTemplate); err != nil {
|
||||
return errors.WithMessagef(err, "invalid template of trait %s", td.name)
|
||||
}
|
||||
var paramFile = model.ParameterFieldName + ": {}"
|
||||
buff := abstractTemplate + "\n"
|
||||
if params != nil {
|
||||
bt, err := json.Marshal(params)
|
||||
if err != nil {
|
||||
return errors.WithMessagef(err, "marshal parameter of trait %s", td.name)
|
||||
}
|
||||
if string(bt) != "null" {
|
||||
paramFile = fmt.Sprintf("%s: %s", model.ParameterFieldName, string(bt))
|
||||
buff += fmt.Sprintf("%s: %s\n", model.ParameterFieldName, string(bt))
|
||||
}
|
||||
}
|
||||
if err := bi.AddFile(model.ParameterFieldName, paramFile); err != nil {
|
||||
return errors.WithMessagef(err, "invalid parameter of trait %s", td.name)
|
||||
}
|
||||
c, err := ctx.ExtendedContextFile()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := bi.AddFile("context", c); err != nil {
|
||||
buff += c
|
||||
if err := value.AddFile(bi, "-", buff); err != nil {
|
||||
return errors.WithMessagef(err, "invalid context of trait %s", td.name)
|
||||
}
|
||||
|
||||
inst, err := td.pd.ImportPackagesAndBuildInstance(bi)
|
||||
val, err := td.pd.ImportPackagesAndBuildValue(bi)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := inst.Value().Validate(); err != nil {
|
||||
if err := val.Validate(); err != nil {
|
||||
return errors.WithMessagef(err, "invalid template of trait %s after merge with parameter and context", td.name)
|
||||
}
|
||||
processing := inst.Lookup("processing")
|
||||
processing := val.LookupPath(value.FieldPath("processing"))
|
||||
if processing.Exists() {
|
||||
if inst, err = task.Process(inst); err != nil {
|
||||
if val, err = task.Process(val); err != nil {
|
||||
return errors.WithMessagef(err, "invalid process of trait %s", td.name)
|
||||
}
|
||||
}
|
||||
outputs := inst.Lookup(OutputsFieldName)
|
||||
outputs := val.LookupPath(value.FieldPath(OutputsFieldName))
|
||||
if outputs.Exists() {
|
||||
st, err := outputs.Struct()
|
||||
if err != nil {
|
||||
@@ -353,34 +343,28 @@ func (td *traitDef) Complete(ctx process.Context, abstractTemplate string, param
|
||||
}
|
||||
}
|
||||
|
||||
patcher := inst.Lookup(PatchFieldName)
|
||||
patcher := val.LookupPath(value.FieldPath(PatchFieldName))
|
||||
base, auxiliaries := ctx.Output()
|
||||
if patcher.Exists() {
|
||||
p, err := model.NewOther(patcher)
|
||||
if err != nil {
|
||||
return errors.WithMessagef(err, "invalid patch of trait %s", td.name)
|
||||
}
|
||||
if err := base.Unify(p, sets.CreateUnifyOptionsForPatcher(patcher)...); err != nil {
|
||||
if base != nil && patcher.Exists() {
|
||||
if err := base.Unify(patcher, sets.CreateUnifyOptionsForPatcher(patcher)...); err != nil {
|
||||
return errors.WithMessagef(err, "invalid patch trait %s into workload", td.name)
|
||||
}
|
||||
}
|
||||
outputsPatcher := inst.Lookup(PatchOutputsFieldName)
|
||||
if outputsPatcher.Exists() {
|
||||
outputsPatcher := val.LookupPath(value.FieldPath(PatchOutputsFieldName))
|
||||
if base != nil && outputsPatcher.Exists() {
|
||||
for _, auxiliary := range auxiliaries {
|
||||
target := outputsPatcher.Lookup(auxiliary.Name)
|
||||
if target.Exists() {
|
||||
t, err := model.NewOther(target)
|
||||
if err != nil {
|
||||
return errors.WithMessagef(err, "trait=%s, to=%s, invalid trait patch", td.name, auxiliary.Name)
|
||||
}
|
||||
if err := auxiliary.Ins.Unify(t); err != nil {
|
||||
return errors.WithMessagef(err, "trait=%s, to=%s, invalid patch trait into auxiliary workload", td.name, auxiliary.Name)
|
||||
}
|
||||
target := outputsPatcher.LookupPath(value.FieldPath(auxiliary.Name))
|
||||
if !target.Exists() {
|
||||
return errors.WithMessagef(err, "trait=%s, to=%s, invalid patch trait into auxiliary workload", td.name, auxiliary.Name)
|
||||
}
|
||||
patcher := outputsPatcher.LookupPath(value.FieldPath(auxiliary.Name))
|
||||
if err := auxiliary.Ins.Unify(patcher); err != nil {
|
||||
return errors.WithMessagef(err, "trait=%s, to=%s, invalid patch trait into auxiliary workload", td.name, auxiliary.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
errs := inst.Lookup(ErrsFieldName)
|
||||
errs := val.LookupPath(value.FieldPath(ErrsFieldName))
|
||||
if errs.Exists() {
|
||||
if err := parseErrors(errs); err != nil {
|
||||
return err
|
||||
@@ -425,6 +409,13 @@ func initRoot(contextLabels map[string]string) map[string]interface{} {
return root
}

func renderTemplate(templ string) string {
return templ + `
context: _
parameter: _
`
}

func (td *traitDef) getTemplateContext(ctx process.Context, cli client.Reader, accessor util.NamespaceAccessor) (map[string]interface{}, error) {
var root = initRoot(ctx.BaseContextLabels())
var commonLabels = GetCommonLabels(ctx.BaseContextLabels())

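A note on why renderTemplate appends "context: _" and "parameter: _" (an assumption based on the code, not stated in the commit message): declaring the two fields as top keeps references to them resolvable when the template is compiled on its own, and the real values unify in later without conflict. A small sketch of that effect:

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	ctx := cuecontext.New()

	// Without the trailing "parameter: _" the reference below would not
	// resolve when the template is compiled on its own.
	tmpl := `
output: image: parameter.image
parameter: _
`
	v := ctx.CompileString(tmpl)

	// The concrete parameter is unified in later, mirroring how the separate
	// parameter file is added to the build instance.
	v = v.FillPath(cue.ParsePath("parameter"), map[string]string{"image": "nginx"})

	s, err := v.LookupPath(cue.ParsePath("output.image")).String()
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // nginx
}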
@@ -20,6 +20,7 @@ import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime"

@@ -957,16 +958,6 @@ patch: {
|
||||
metadata: name: none
|
||||
}
|
||||
|
||||
parameter: [string]: string`,
|
||||
params: map[string]interface{}{},
|
||||
hasCompileErr: true,
|
||||
},
|
||||
"incorrect use of the map field in patch will raise error": {
|
||||
traitTemplate: `
|
||||
patch: {
|
||||
metadata: annotations: parameter.none
|
||||
}
|
||||
|
||||
parameter: [string]: string`,
|
||||
params: map[string]interface{}{},
|
||||
hasCompileErr: true,
|
||||
@@ -992,7 +983,7 @@ patch: {
|
||||
|
||||
parameter: [string]: string`,
|
||||
params: map[string]interface{}{
|
||||
"wrong-keyword": "_|_ //",
|
||||
"wrong-keyword": 5,
|
||||
},
|
||||
hasCompileErr: true,
|
||||
},
|
||||
@@ -1080,22 +1071,23 @@ parameter: { errs: [...string] }`,
|
||||
return
|
||||
}
|
||||
td := NewTraitAbstractEngine(v.traitName, &packages.PackageDiscover{})
|
||||
r := require.New(t)
|
||||
err := td.Complete(ctx, v.traitTemplate, v.params)
|
||||
hasError := err != nil
|
||||
assert.Equal(t, v.hasCompileErr, hasError)
|
||||
if v.hasCompileErr {
|
||||
r.Error(err, cassinfo)
|
||||
continue
|
||||
}
|
||||
r.NoError(err, cassinfo)
|
||||
base, assists := ctx.Output()
|
||||
assert.Equal(t, len(v.expAssObjs), len(assists), cassinfo)
|
||||
assert.NotNil(t, base)
|
||||
r.Equal(len(v.expAssObjs), len(assists), cassinfo)
|
||||
r.NotNil(base)
|
||||
obj, err := base.Unstructured()
|
||||
assert.NoError(t, err, base.String())
|
||||
assert.Equal(t, v.expWorkload, obj, cassinfo)
|
||||
r.NoError(err)
|
||||
r.Equal(v.expWorkload, obj, cassinfo)
|
||||
for _, ss := range assists {
|
||||
got, err := ss.Ins.Unstructured()
|
||||
assert.NoError(t, err, cassinfo)
|
||||
assert.Equal(t, v.expAssObjs[ss.Type+ss.Name], got, "case %s , type: %s name: %s", cassinfo, ss.Type, ss.Name)
|
||||
r.NoError(err, cassinfo)
|
||||
r.Equal(v.expAssObjs[ss.Type+ss.Name], got, "case %s , type: %s name: %s, got: %s", cassinfo, ss.Type, ss.Name, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1178,7 +1170,9 @@ outputs: service :{
|
||||
_, assists := ctx.Output()
|
||||
for i, ss := range assists {
|
||||
assert.Equal(t, ss.Name, v.order[i].name)
|
||||
assert.Equal(t, ss.Ins.String(), v.order[i].content)
|
||||
s, err := ss.Ins.String()
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, s, v.order[i].content)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1261,7 +1255,9 @@ outputs: abc :{
|
||||
_, assists := ctx.Output()
|
||||
for i, ss := range assists {
|
||||
assert.Equal(t, ss.Name, v.order[i].name)
|
||||
assert.Equal(t, ss.Ins.String(), v.order[i].content)
|
||||
s, err := ss.Ins.String()
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, s, v.order[i].content)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,12 +17,7 @@ limitations under the License.
|
||||
package model
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/format"
|
||||
"github.com/pkg/errors"
|
||||
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
|
||||
"k8s.io/klog/v2"
|
||||
@@ -32,20 +27,25 @@ import (
|
||||
|
||||
// Instance defines Model Interface
|
||||
type Instance interface {
|
||||
String() string
|
||||
String() (string, error)
|
||||
Value() cue.Value
|
||||
Unstructured() (*unstructured.Unstructured, error)
|
||||
IsBase() bool
|
||||
Unify(other Instance, options ...sets.UnifyOption) error
|
||||
Unify(other cue.Value, options ...sets.UnifyOption) error
|
||||
Compile() ([]byte, error)
|
||||
}
|
||||
|
||||
type instance struct {
|
||||
v string
|
||||
v cue.Value
|
||||
base bool
|
||||
}
|
||||
|
||||
// String return instance's cue format string
|
||||
func (inst *instance) String() string {
|
||||
func (inst *instance) String() (string, error) {
|
||||
return sets.ToString(inst.v)
|
||||
}
|
||||
|
||||
func (inst *instance) Value() cue.Value {
|
||||
return inst.v
|
||||
}
|
||||
|
||||
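
Because Instance.String() now returns (string, error), call sites have to handle a possible rendering failure. A hedged sketch of the adjusted pattern using model.NewBase; the Deployment snippet is illustrative.

package main

import (
	"fmt"

	"cuelang.org/go/cue/cuecontext"

	"github.com/oam-dev/kubevela/pkg/cue/model"
)

func main() {
	// Illustrative object, not taken from the diff.
	v := cuecontext.New().CompileString(`
apiVersion: "apps/v1"
kind:       "Deployment"
metadata: name: "demo"
`)

	base, err := model.NewBase(v)
	if err != nil {
		panic(err)
	}

	// String now returns (string, error): rendering the cue.Value back to
	// source can fail, for example when it contains bottom values.
	s, err := base.String()
	if err != nil {
		panic(err)
	}
	fmt.Println(s)
}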
@@ -55,21 +55,14 @@ func (inst *instance) IsBase() bool {
|
||||
}
|
||||
|
||||
func (inst *instance) Compile() ([]byte, error) {
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
err := bi.AddFile("-", inst.v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var r cue.Runtime
|
||||
it, err := r.Build(bi)
|
||||
if err != nil {
|
||||
if err := inst.v.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// compiled object should be final and concrete value
|
||||
if err := it.Value().Validate(cue.Concrete(true), cue.Final()); err != nil {
|
||||
if err := inst.v.Validate(cue.Concrete(true), cue.Final()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return it.Value().MarshalJSON()
|
||||
return inst.v.MarshalJSON()
|
||||
}
|
||||
|
||||
// Unstructured convert cue values to unstructured.Unstructured
|
||||
@@ -77,7 +70,7 @@ func (inst *instance) Compile() ([]byte, error) {
|
||||
func (inst *instance) Unstructured() (*unstructured.Unstructured, error) {
|
||||
jsonv, err := inst.Compile()
|
||||
if err != nil {
|
||||
klog.ErrorS(err, "failed to have the workload/trait unstructured", "Definition", inst.String())
|
||||
klog.ErrorS(err, "failed to have the workload/trait unstructured", "Definition", inst.v)
|
||||
return nil, errors.Wrap(err, "failed to have the workload/trait unstructured")
|
||||
}
|
||||
o := &unstructured.Unstructured{}
|
||||
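
The slimmed-down Compile/Unstructured path validates the held cue.Value directly and marshals it to JSON instead of rebuilding it through a cue.Runtime. Roughly, the flow is equivalent to the following standalone sketch (illustrative object, not from the diff):

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)

func main() {
	v := cuecontext.New().CompileString(`
apiVersion: "apps/v1"
kind:       "Deployment"
metadata: name: "demo"
`)

	// The compiled object must be final and concrete before marshalling.
	if err := v.Validate(cue.Concrete(true), cue.Final()); err != nil {
		panic(err)
	}
	b, err := v.MarshalJSON()
	if err != nil {
		panic(err)
	}

	o := &unstructured.Unstructured{}
	if err := o.UnmarshalJSON(b); err != nil {
		panic(err)
	}
	fmt.Println(o.GetKind(), o.GetName()) // Deployment demo
}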
@@ -88,8 +81,8 @@ func (inst *instance) Unstructured() (*unstructured.Unstructured, error) {
|
||||
}
|
||||
|
||||
// Unify implement unity operations between instances
|
||||
func (inst *instance) Unify(other Instance, options ...sets.UnifyOption) error {
|
||||
pv, err := sets.StrategyUnify(inst.v, other.String(), options...)
|
||||
func (inst *instance) Unify(other cue.Value, options ...sets.UnifyOption) error {
|
||||
pv, err := sets.StrategyUnify(inst.v, other, options...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -99,59 +92,15 @@ func (inst *instance) Unify(other Instance, options ...sets.UnifyOption) error {
|
||||
|
||||
// NewBase create a base instance
|
||||
func NewBase(v cue.Value) (Instance, error) {
|
||||
vs, err := openPrint(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &instance{
|
||||
v: vs,
|
||||
v: v,
|
||||
base: true,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// NewOther create a non-base instance
|
||||
func NewOther(v cue.Value) (Instance, error) {
|
||||
vs, err := openPrint(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &instance{
|
||||
v: vs,
|
||||
v: v,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func openPrint(v cue.Value) (string, error) {
|
||||
sysopts := []cue.Option{cue.All(), cue.DisallowCycles(true), cue.ResolveReferences(true), cue.Docs(true)}
|
||||
f, err := sets.ToFile(v.Syntax(sysopts...))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
for _, decl := range f.Decls {
|
||||
sets.ListOpen(decl)
|
||||
}
|
||||
|
||||
ret, err := format.Node(f)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
errInfo, contain := IndexMatchLine(string(ret), "_|_")
|
||||
if contain {
|
||||
return "", errors.New(errInfo)
|
||||
}
|
||||
return string(ret), nil
|
||||
}
|
||||
|
||||
// IndexMatchLine will index and extract the line contains the pattern.
func IndexMatchLine(ret, target string) (string, bool) {
if strings.Contains(ret, target) {
if target == "_|_" {
r := regexp.MustCompile(`_\|_[\s]//.*`)
match := r.FindAllString(ret, -1)
if len(match) > 0 {
return strings.Join(match, ","), true
}
}
}
return "", false
}

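For reference, the bottom-value extraction in IndexMatchLine can be exercised on its own; a small sketch using the same regular expression, with an illustrative rendered document:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// A rendered CUE document that contains a bottom value with its message.
	rendered := `value: _|_ // conflicting values "a" and "b"
other: "ok"`

	if strings.Contains(rendered, "_|_") {
		// Same pattern as IndexMatchLine: a bottom marker followed by its comment.
		r := regexp.MustCompile(`_\|_[\s]//.*`)
		match := r.FindAllString(rendered, -1)
		fmt.Println(strings.Join(match, ","))
		// Output: _|_ // conflicting values "a" and "b"
	}
}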
@@ -20,51 +20,12 @@ import (
"fmt"
"testing"

"cuelang.org/go/cue"
"cuelang.org/go/cue/cuecontext"
"github.com/stretchr/testify/assert"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
)

func TestGetCompileError(t *testing.T) {
|
||||
testcases := []struct {
|
||||
src string
|
||||
wantErr bool
|
||||
errInfo string
|
||||
}{{
|
||||
src: ` env: [{
|
||||
name: "HELLO"
|
||||
value: "_A_|_B_|_C_"
|
||||
}]`,
|
||||
wantErr: false,
|
||||
errInfo: "",
|
||||
}, {
|
||||
src: ` env: [{
|
||||
name: conflicting
|
||||
value: _|_ // conflicting values "ENV_LEVEL" and "JAVA_TOOL_OPTIONS"
|
||||
}]`,
|
||||
wantErr: true,
|
||||
errInfo: "_|_ // conflicting values \"ENV_LEVEL\" and \"JAVA_TOOL_OPTIONS\"",
|
||||
}, {
|
||||
src: ` env: [{
|
||||
name: conflicting-1
|
||||
value: _|_ // conflicting values "ENV_LEVEL" and "JAVA_TOOL_OPTIONS"
|
||||
},{
|
||||
name: conflicting-2
|
||||
value: _|_ // conflicting values "HELLO" and "WORLD"
|
||||
}]`,
|
||||
wantErr: true,
|
||||
errInfo: "_|_ // conflicting values \"ENV_LEVEL\" and \"JAVA_TOOL_OPTIONS\"," +
|
||||
"_|_ // conflicting values \"HELLO\" and \"WORLD\"",
|
||||
}}
|
||||
for _, tt := range testcases {
|
||||
errInfo, contains := IndexMatchLine(tt.src, "_|_")
|
||||
assert.Equal(t, tt.wantErr, contains)
|
||||
assert.Equal(t, tt.errInfo, errInfo)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestInstance(t *testing.T) {
|
||||
|
||||
testCases := []struct {
|
||||
@@ -83,12 +44,7 @@ metadata: name: "test"
|
||||
}
|
||||
|
||||
for _, v := range testCases {
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", v.src)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
inst := cuecontext.New().CompileString(v.src)
|
||||
base, err := NewBase(inst.Value())
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
@@ -169,9 +125,7 @@ output: {
|
||||
}
|
||||
`
|
||||
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", base)
|
||||
assert.NoError(t, err)
|
||||
inst := cuecontext.New().CompileString(base)
|
||||
newbase, err := NewBase(inst.Value())
|
||||
assert.NoError(t, err)
|
||||
data, err := newbase.Unstructured()
|
||||
@@ -181,26 +135,27 @@ output: {
|
||||
}
|
||||
|
||||
func TestError(t *testing.T) {
|
||||
ctx := cuecontext.New()
|
||||
ins := &instance{
|
||||
v: ``,
|
||||
v: ctx.CompileString(``),
|
||||
}
|
||||
_, err := ins.Unstructured()
|
||||
assert.Equal(t, err.Error(), "Object 'Kind' is missing in '{}'")
|
||||
ins = &instance{
|
||||
v: `
|
||||
v: ctx.CompileString(`
|
||||
apiVersion: "apps/v1"
|
||||
kind: "Deployment"
|
||||
metadata: name: parameter.name
|
||||
`,
|
||||
`),
|
||||
}
|
||||
_, err = ins.Unstructured()
|
||||
assert.Equal(t, err.Error(), fmt.Sprintf(`failed to have the workload/trait unstructured: metadata.name: reference "%s" not found`, ParameterFieldName))
|
||||
assert.Equal(t, err.Error(), fmt.Sprintf("failed to have the workload/trait unstructured: metadata.name: reference \"%s\" not found", ParameterFieldName))
|
||||
ins = &instance{
|
||||
v: `
|
||||
v: ctx.CompileString(`
|
||||
apiVersion: "apps/v1"
|
||||
kind: "Deployment"
|
||||
metadata: name: "abc"
|
||||
`,
|
||||
`),
|
||||
}
|
||||
obj, err := ins.Unstructured()
|
||||
assert.Equal(t, err, nil)
|
||||
@@ -215,7 +170,7 @@ metadata: name: "abc"
|
||||
})
|
||||
|
||||
ins = &instance{
|
||||
v: `
|
||||
v: ctx.CompileString(`
|
||||
apiVersion: "source.toolkit.fluxcd.io/v1beta1"
|
||||
metadata: {
|
||||
name: "grafana"
|
||||
@@ -224,7 +179,7 @@ kind: "HelmRepository"
|
||||
spec: {
|
||||
url: string
|
||||
interval: *"5m" | string
|
||||
}`,
|
||||
}`),
|
||||
}
|
||||
o, err := ins.Unstructured()
|
||||
assert.Nil(t, o)
|
||||
|
||||
@@ -22,7 +22,7 @@ import (
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
jsonpatch "github.com/evanphx/json-patch"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
@@ -190,7 +190,10 @@ func strategyPatchHandle() interceptor {
|
||||
paths := append(ctx.Pos(), labelStr(field.Label))
|
||||
baseSubNode, err := lookUp(baseNode, paths...)
|
||||
if err != nil {
|
||||
return
|
||||
if errors.Is(err, notFoundErr) {
|
||||
return
|
||||
}
|
||||
baseSubNode = ast.NewList()
|
||||
}
|
||||
baselist, ok := baseSubNode.(*ast.ListLit)
|
||||
if !ok {
|
||||
@@ -256,69 +259,60 @@ func IsJSONPatch(patcher cue.Value) bool {
|
||||
}
|
||||
|
||||
// StrategyUnify unify the objects by the strategy
|
||||
func StrategyUnify(base, patch string, options ...UnifyOption) (ret string, err error) {
|
||||
func StrategyUnify(base, patch cue.Value, options ...UnifyOption) (ret cue.Value, err error) {
|
||||
params := newUnifyParams(options...)
|
||||
var patchOpts []interceptor
|
||||
if params.PatchStrategy == StrategyJSONMergePatch || params.PatchStrategy == StrategyJSONPatch {
|
||||
base, err = OpenBaiscLit(base)
|
||||
_, err := OpenBaiscLit(base)
|
||||
if err != nil {
|
||||
return base, err
|
||||
}
|
||||
} else {
|
||||
patchOpts = []interceptor{strategyPatchHandle()}
|
||||
}
|
||||
baseFile, err := parser.ParseFile("-", base, parser.ParseComments)
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "invalid base cue file")
|
||||
}
|
||||
patchFile, err := parser.ParseFile("-", patch, parser.ParseComments)
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "invalid patch cue file")
|
||||
}
|
||||
|
||||
return strategyUnify(baseFile, patchFile, params, patchOpts...)
|
||||
return strategyUnify(base, patch, params, patchOpts...)
|
||||
}
|
||||
|
||||
func strategyUnify(baseFile *ast.File, patchFile *ast.File, params *UnifyParams, patchOpts ...interceptor) (string, error) {
|
||||
// nolint:staticcheck
|
||||
func strategyUnify(base cue.Value, patch cue.Value, params *UnifyParams, patchOpts ...interceptor) (val cue.Value, err error) {
|
||||
if params.PatchStrategy == StrategyJSONMergePatch {
|
||||
return jsonMergePatch(base, patch)
|
||||
} else if params.PatchStrategy == StrategyJSONPatch {
|
||||
return jsonPatch(base, patch.LookupPath(cue.ParsePath("operations")))
|
||||
}
|
||||
openBase, err := openListLit(base)
|
||||
if err != nil {
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to open list it for merge")
|
||||
}
|
||||
patchFile, err := ToFile(patch.Syntax(cue.Docs(true), cue.ResolveReferences(true)))
|
||||
if err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
for _, option := range patchOpts {
|
||||
if err := option(baseFile, patchFile); err != nil {
|
||||
return "", errors.WithMessage(err, "process patchOption")
|
||||
if err := option(openBase, patchFile); err != nil {
|
||||
return cue.Value{}, errors.WithMessage(err, "process patchOption")
|
||||
}
|
||||
}
|
||||
|
||||
var r cue.Runtime
|
||||
baseInst := cuecontext.New().BuildFile(openBase)
|
||||
patchInst := cuecontext.New().BuildFile(patchFile)
|
||||
|
||||
baseInst, err := r.CompileFile(baseFile)
|
||||
ret := baseInst.Unify(patchInst)
|
||||
|
||||
_, err = toString(ret, removeTmpVar)
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "compile base file")
|
||||
}
|
||||
patchInst, err := r.CompileFile(patchFile)
|
||||
if err != nil {
|
||||
return "", errors.WithMessage(err, "compile patch file")
|
||||
}
|
||||
|
||||
if params.PatchStrategy == StrategyJSONMergePatch {
|
||||
return jsonMergePatch(baseInst.Value(), patchInst.Value())
|
||||
} else if params.PatchStrategy == StrategyJSONPatch {
|
||||
return jsonPatch(baseInst.Value(), patchInst.Lookup("operations"))
|
||||
}
|
||||
|
||||
ret := baseInst.Value().Unify(patchInst.Value())
|
||||
|
||||
rv, err := toString(ret, removeTmpVar)
|
||||
if err != nil {
|
||||
return rv, errors.WithMessage(err, " format result toString")
|
||||
return ret, errors.WithMessage(err, " format result toString")
|
||||
}
|
||||
|
||||
if err := ret.Err(); err != nil {
|
||||
return rv, errors.WithMessage(err, "result check err")
|
||||
return ret, errors.WithMessage(err, "result check err")
|
||||
}
|
||||
|
||||
if err := ret.Validate(cue.All()); err != nil {
|
||||
return rv, errors.WithMessage(err, "result validate")
|
||||
return ret, errors.WithMessage(err, "result validate")
|
||||
}
|
||||
|
||||
return rv, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
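
Callers of StrategyUnify now compile both sides to cue.Value first, as the updated tests later in this commit do. A hedged usage sketch, rendering the result via the exported sets.ToString and assuming the same +patchKey merge semantics shown in the tests; the container snippets are illustrative:

package main

import (
	"fmt"

	"cuelang.org/go/cue/cuecontext"

	"github.com/oam-dev/kubevela/pkg/cue/model/sets"
)

func main() {
	ctx := cuecontext.New()
	base := ctx.CompileString(`containers: [{name: "x1"}, {name: "x2"}, ...]`)
	patch := ctx.CompileString(`
// +patchKey=name
containers: [{name: "x2", image: "nginx"}]`)

	v, err := sets.StrategyUnify(base, patch)
	if err != nil {
		panic(err)
	}
	s, err := sets.ToString(v)
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // "x2" keeps its position and gains the image field
}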
func findCommentTag(commentGroup []*ast.CommentGroup) map[string]string {
|
||||
@@ -348,47 +342,51 @@ func findCommentTag(commentGroup []*ast.CommentGroup) map[string]string {
|
||||
return kval
|
||||
}
|
||||
|
||||
func jsonMergePatch(base cue.Value, patch cue.Value) (string, error) {
|
||||
func jsonMergePatch(base cue.Value, patch cue.Value) (cue.Value, error) {
|
||||
ctx := cuecontext.New()
|
||||
baseJSON, err := base.MarshalJSON()
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to marshal base value")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to marshal base value")
|
||||
}
|
||||
patchJSON, err := patch.MarshalJSON()
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to marshal patch value")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to marshal patch value")
|
||||
}
|
||||
merged, err := jsonpatch.MergePatch(baseJSON, patchJSON)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to merge base value and patch value by JsonMergePatch")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to merge base value and patch value by JsonMergePatch")
|
||||
}
|
||||
output, err := OpenBaiscLit(string(merged))
|
||||
val := ctx.CompileBytes(merged)
|
||||
output, err := OpenBaiscLit(val)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to parse open basic lit for merged result")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to parse open basic lit for merged result")
|
||||
}
|
||||
return output, nil
|
||||
return ctx.BuildFile(output), nil
|
||||
}
|
||||
|
||||
func jsonPatch(base cue.Value, patch cue.Value) (string, error) {
|
||||
func jsonPatch(base cue.Value, patch cue.Value) (cue.Value, error) {
|
||||
ctx := cuecontext.New()
|
||||
baseJSON, err := base.MarshalJSON()
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to marshal base value")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to marshal base value")
|
||||
}
|
||||
patchJSON, err := patch.MarshalJSON()
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to marshal patch value")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to marshal patch value")
|
||||
}
|
||||
decodedPatch, err := jsonpatch.DecodePatch(patchJSON)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to decode patch")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to decode patch")
|
||||
}
|
||||
|
||||
merged, err := decodedPatch.Apply(baseJSON)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to apply json patch")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to apply json patch")
|
||||
}
|
||||
output, err := OpenBaiscLit(string(merged))
|
||||
val := ctx.CompileBytes(merged)
|
||||
output, err := OpenBaiscLit(val)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to parse open basic lit for merged result")
|
||||
return cue.Value{}, errors.Wrapf(err, "failed to parse open basic lit for merged result")
|
||||
}
|
||||
return output, nil
|
||||
return ctx.BuildFile(output), nil
|
||||
}
|
||||
|
||||
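
The JSON-merge-patch branch now round-trips through cue.Context instead of strings: marshal both values, merge the bytes with jsonpatch.MergePatch, and compile the result back. A minimal sketch of that flow, with illustrative spec values:

package main

import (
	"fmt"

	"cuelang.org/go/cue/cuecontext"
	jsonpatch "github.com/evanphx/json-patch"
)

func main() {
	ctx := cuecontext.New()
	base := ctx.CompileString(`spec: {replicas: 1, paused: false}`)
	patch := ctx.CompileString(`spec: {replicas: 3}`)

	baseJSON, err := base.MarshalJSON()
	if err != nil {
		panic(err)
	}
	patchJSON, err := patch.MarshalJSON()
	if err != nil {
		panic(err)
	}

	// RFC 7386 merge; the merged bytes are compiled straight back to a cue.Value.
	merged, err := jsonpatch.MergePatch(baseJSON, patchJSON)
	if err != nil {
		panic(err)
	}
	out, err := ctx.CompileBytes(merged).MarshalJSON()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // spec.replicas is now 3, paused stays false
}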
@@ -21,15 +21,19 @@ import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"github.com/bmizerany/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestPatch(t *testing.T) {
|
||||
|
||||
testCase := []struct {
|
||||
base string
|
||||
patch string
|
||||
result string
|
||||
base string
|
||||
patch string
|
||||
result string
|
||||
expectedErr string
|
||||
}{
|
||||
{
|
||||
base: `containers: [{name: "x1"},{name: "x2"},...]`,
|
||||
@@ -44,15 +48,27 @@ func TestPatch(t *testing.T) {
|
||||
},
|
||||
|
||||
{
|
||||
base: `containers: [{name: "x1"},{name: "x2"},...]`,
|
||||
patch: `containers: [{name: "x2"},{name: "x1"}]`,
|
||||
result: "_|_\n",
|
||||
base: `containers: [{name: "x1"},{name: "x2"},...]`,
|
||||
patch: `containers: [{name: "x2"},{name: "x1"}]`,
|
||||
result: `containers: [{
|
||||
name: _|_ // containers.0.name: conflicting values "x2" and "x1"
|
||||
}, {
|
||||
name: _|_ // containers.1.name: conflicting values "x1" and "x2"
|
||||
}]
|
||||
`,
|
||||
expectedErr: `conflicting values "x2" and "x1"`,
|
||||
},
|
||||
|
||||
{
|
||||
base: `containers: [{name: _|_},{name: "x2"},...]`,
|
||||
patch: `containers: [{name: _|_},{name: "x2"}]`,
|
||||
result: "_|_\n",
|
||||
base: `containers: [{name: _|_},{name: "x2"},...]`,
|
||||
patch: `containers: [{name: _|_},{name: "x2"}]`,
|
||||
result: `containers: [{
|
||||
name: _|_ // explicit error (_|_ literal) in source (and 1 more errors)
|
||||
}, {
|
||||
name: "x2"
|
||||
}]
|
||||
`,
|
||||
expectedErr: "explicit error (_|_ literal) in source",
|
||||
},
|
||||
|
||||
{
|
||||
@@ -70,12 +86,16 @@ containers: [{
|
||||
},
|
||||
|
||||
{
|
||||
// lose close here
|
||||
base: `containers: [close({namex: "x1"}),...]`,
|
||||
patch: `
|
||||
// +patchKey=name
|
||||
containers: [{name: "x2"},{name: "x1"}]`,
|
||||
result: ` // +patchKey=name
|
||||
containers: [_|_, // field "name" not allowed in closed struct{
|
||||
result: `// +patchKey=name
|
||||
containers: [{
|
||||
namex: "x1"
|
||||
name: "x2"
|
||||
}, {
|
||||
name: "x1"
|
||||
}, ...]
|
||||
`,
|
||||
@@ -103,17 +123,11 @@ containers: [{
|
||||
base: `containers: [{name: "x1"},{name: "x2"},...]`,
|
||||
patch: `
|
||||
// +patchKey=name
|
||||
containers: [{noname: "x3"}]`,
|
||||
result: "_|_\n",
|
||||
},
|
||||
{
|
||||
base: `containers: [{name: "x1"},{name: "x2"},...]`,
|
||||
patch: `
|
||||
// +patchKey=name
|
||||
containers: []`,
|
||||
containers: [{noname: "x3"},...]`,
|
||||
result: `// +patchKey=name
|
||||
containers: [{
|
||||
name: "x1"
|
||||
name: "x1"
|
||||
noname: "x3"
|
||||
}, {
|
||||
name: "x2"
|
||||
}, ...]
|
||||
@@ -121,8 +135,7 @@ containers: [{
|
||||
},
|
||||
{
|
||||
base: `containers: [{name: "x1"},{name: "x2"},...]`,
|
||||
patch: `
|
||||
// +patchKey=name
|
||||
patch: `// +patchKey=name
|
||||
containers: [{noname: "x3"},{name: "x1"}]`,
|
||||
result: `// +patchKey=name
|
||||
containers: [{
|
||||
@@ -154,16 +167,17 @@ containers: [{
|
||||
patch: `
|
||||
// +patchKey=name
|
||||
containers: [{name: "x2", envs: [close({name: "OPS", value: "OAM"})]}]`,
|
||||
// TODO: fix losing close struct in cue
|
||||
result: `// +patchKey=name
|
||||
containers: [close({
|
||||
containers: [{
|
||||
name: "x1"
|
||||
}), close({
|
||||
}, {
|
||||
name: "x2"
|
||||
envs: [close({
|
||||
envs: [{
|
||||
name: "OPS"
|
||||
value: "OAM"
|
||||
}), ...]
|
||||
}), ...]
|
||||
}, ...]
|
||||
}, ...]
|
||||
`,
|
||||
},
|
||||
|
||||
@@ -290,16 +304,17 @@ containers: [{
|
||||
},...]`,
|
||||
result: `containers: [{
|
||||
volumeMounts: [{
|
||||
name: "k1"
|
||||
path: "p1"
|
||||
name: "k1"
|
||||
}, {
|
||||
name: "k1"
|
||||
path: "p2"
|
||||
name: "k1"
|
||||
}, {
|
||||
path: "p3"
|
||||
name: "k2"
|
||||
path: "p3"
|
||||
}]
|
||||
}, ...]
|
||||
|
||||
// +patchKey=name
|
||||
volumes: [{
|
||||
name: "x1"
|
||||
@@ -369,12 +384,44 @@ containers: [{
|
||||
}, ...]
|
||||
}, ...]
|
||||
`},
|
||||
{
|
||||
base: `containers: [{name: "x1"}]`,
|
||||
patch: `
|
||||
containers: [{
|
||||
// +patchKey=name
|
||||
env: [{
|
||||
name: "k"
|
||||
value: "v"
|
||||
}]
|
||||
}, ...]`,
|
||||
result: `containers: [{
|
||||
name: "x1"
|
||||
// +patchKey=name
|
||||
env: [{
|
||||
name: "k"
|
||||
value: "v"
|
||||
}]
|
||||
}, ...]
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
for i, tcase := range testCase {
|
||||
t.Run(fmt.Sprintf("case-%d", i), func(t *testing.T) {
|
||||
v, _ := StrategyUnify(tcase.base, tcase.patch)
|
||||
assert.Equal(t, v, tcase.result, fmt.Sprintf("testPatch for case(no:%d) %s", i, v))
|
||||
r := require.New(t)
|
||||
ctx := cuecontext.New()
|
||||
base := ctx.CompileString(tcase.base)
|
||||
patch := ctx.CompileString(tcase.patch)
|
||||
v, err := StrategyUnify(base, patch)
|
||||
if tcase.expectedErr != "" {
|
||||
r.Error(err)
|
||||
r.Contains(err.Error(), tcase.expectedErr)
|
||||
return
|
||||
}
|
||||
r.NoError(err)
|
||||
s, err := toString(v)
|
||||
r.NoError(err)
|
||||
r.Equal(s, tcase.result, fmt.Sprintf("testPatch for case(no:%d) %s", i, v))
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -404,6 +451,9 @@ spec: {
|
||||
strategy: {
|
||||
// +patchStrategy=retainKeys
|
||||
type: "recreate"
|
||||
rollingUpdate: {
|
||||
maxSurge: "30%"
|
||||
}
|
||||
}
|
||||
}
|
||||
`},
|
||||
@@ -427,6 +477,9 @@ spec: {
|
||||
strategy: {
|
||||
// +patchStrategy=retainKeys
|
||||
type: "recreate"
|
||||
rollingUpdate: {
|
||||
maxSurge: "30%"
|
||||
}
|
||||
}
|
||||
}
|
||||
`},
|
||||
@@ -457,7 +510,7 @@ volumes: [{
|
||||
configMap: {
|
||||
name: "conf-name"
|
||||
}
|
||||
}]
|
||||
}, ...]
|
||||
`},
|
||||
|
||||
{
|
||||
@@ -493,7 +546,7 @@ volumes: [{
|
||||
configMap: {
|
||||
name: "conf-name"
|
||||
}
|
||||
}]
|
||||
}, ...]
|
||||
`},
|
||||
|
||||
{
|
||||
@@ -525,8 +578,8 @@ containers: [{
|
||||
envs: [{
|
||||
name: "e1"
|
||||
value: "v2"
|
||||
}]
|
||||
}]
|
||||
}, ...]
|
||||
}, ...]
|
||||
`},
|
||||
|
||||
{
|
||||
@@ -549,9 +602,9 @@ spec: {
|
||||
envs:[{name: "e1",value: "v2"}]
|
||||
}]}
|
||||
`,
|
||||
result: `// +patchKey=name
|
||||
// +patchStrategy=retainKeys
|
||||
spec: {
|
||||
result: `spec: {
|
||||
// +patchKey=name
|
||||
// +patchStrategy=retainKeys
|
||||
containers: [{
|
||||
name: "c2"
|
||||
envs: [{
|
||||
@@ -588,8 +641,15 @@ metadata: {
|
||||
}
|
||||
|
||||
for i, tcase := range testCase {
|
||||
v, _ := StrategyUnify(tcase.base, tcase.patch)
|
||||
assert.Equal(t, v, tcase.result, fmt.Sprintf("testPatch for case(no:%d) %s", i, v))
|
||||
r := require.New(t)
|
||||
ctx := cuecontext.New()
|
||||
base := ctx.CompileString(tcase.base)
|
||||
patch := ctx.CompileString(tcase.patch)
|
||||
v, err := StrategyUnify(base, patch)
|
||||
r.NoError(err)
|
||||
s, err := toString(v)
|
||||
r.NoError(err)
|
||||
r.Equal(s, tcase.result, fmt.Sprintf("testPatch for case(no:%d) %s", i, s))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -606,13 +666,10 @@ func TestParseCommentTags(t *testing.T) {
|
||||
x: null
|
||||
`
|
||||
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", temp)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
ms := findCommentTag(inst.Lookup("x").Doc())
|
||||
file, err := parser.ParseFile("-", temp, parser.ParseComments)
|
||||
assert.Equal(t, err == nil, true)
|
||||
v := cuecontext.New().BuildFile(file)
|
||||
ms := findCommentTag(v.LookupPath(cue.ParsePath("x")).Doc())
|
||||
assert.Equal(t, ms, map[string]string{
|
||||
"patchKey": "name",
|
||||
"testKey1": "testValue1",
|
||||
|
||||
@@ -23,8 +23,6 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"cuelang.org/go/cue/parser"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/format"
|
||||
@@ -154,7 +152,13 @@ func extractFuncName(expr ast.Expr) (string, []ast.Expr) {
|
||||
func getPaths(node ast.Expr) []string {
|
||||
switch v := node.(type) {
|
||||
case *ast.SelectorExpr:
|
||||
return append(getPaths(v.X), v.Sel.Name)
|
||||
var sel string
|
||||
if l, ok := v.Sel.(*ast.Ident); ok {
|
||||
sel = l.Name
|
||||
} else {
|
||||
sel = fmt.Sprint(v.Sel)
|
||||
}
|
||||
return append(getPaths(v.X), sel)
|
||||
case *ast.Ident:
|
||||
return []string{v.Name}
|
||||
case *ast.BasicLit:
|
||||
@@ -199,9 +203,9 @@ func labelStr(label ast.Label) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
// nolint:staticcheck
|
||||
func toString(v cue.Value, opts ...func(node ast.Node) ast.Node) (string, error) {
|
||||
v = v.Eval()
|
||||
syopts := []cue.Option{cue.All(), cue.DisallowCycles(true), cue.ResolveReferences(true), cue.Docs(true)}
|
||||
syopts := []cue.Option{cue.All(), cue.ResolveReferences(true), cue.DisallowCycles(true), cue.Docs(true), cue.Attributes(true)}
|
||||
|
||||
var w bytes.Buffer
|
||||
useSep := false
|
||||
@@ -296,14 +300,41 @@ func OptBytesToString(node ast.Node) ast.Node {
|
||||
}
|
||||
|
||||
// OpenBaiscLit make that the basicLit can be modified.
|
||||
func OpenBaiscLit(s string) (string, error) {
|
||||
f, err := parser.ParseFile("-", s, parser.ParseComments)
|
||||
// nolint:staticcheck
|
||||
func OpenBaiscLit(val cue.Value) (*ast.File, error) {
|
||||
f, err := ToFile(val.Syntax(cue.Docs(true), cue.ResolveReferences(true)))
|
||||
if err != nil {
|
||||
return "", err
|
||||
return nil, err
|
||||
}
|
||||
openBaiscLit(f)
|
||||
b, err := format.Node(f)
|
||||
return string(b), err
|
||||
return f, err
|
||||
}
|
||||
|
||||
// nolint:staticcheck
|
||||
func openListLit(val cue.Value) (*ast.File, error) {
|
||||
f, err := ToFile(val.Syntax(cue.Docs(true), cue.ResolveReferences(true)))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ast.Walk(f, func(node ast.Node) bool {
|
||||
field, ok := node.(*ast.Field)
|
||||
if ok {
|
||||
v := field.Value
|
||||
switch lit := v.(type) {
|
||||
case *ast.ListLit:
|
||||
if len(lit.Elts) > 0 {
|
||||
if _, ok := lit.Elts[len(lit.Elts)-1].(*ast.Ellipsis); ok {
|
||||
break
|
||||
}
|
||||
}
|
||||
newList := lit.Elts
|
||||
newList = append(newList, &ast.Ellipsis{})
|
||||
field.Value = ast.NewList(newList...)
|
||||
}
|
||||
}
|
||||
return true
|
||||
}, nil)
|
||||
return f, nil
|
||||
}
|
||||
|
||||
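
openListLit re-opens closed list literals by appending an ellipsis before the strategic merge. A simplified, self-contained sketch of the same AST transformation; it skips the comment/doc handling of the real helper and uses an illustrative input:

package main

import (
	"fmt"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/format"
	"cuelang.org/go/cue/parser"
)

func isEllipsis(e ast.Expr) bool {
	_, ok := e.(*ast.Ellipsis)
	return ok
}

func main() {
	f, err := parser.ParseFile("-", `containers: [{name: "x1"}, {name: "x2"}]`, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Append "..." to every list literal that is not already open.
	ast.Walk(f, func(node ast.Node) bool {
		if field, ok := node.(*ast.Field); ok {
			if lit, ok := field.Value.(*ast.ListLit); ok {
				if n := len(lit.Elts); n == 0 || !isEllipsis(lit.Elts[n-1]) {
					field.Value = ast.NewList(append(lit.Elts, &ast.Ellipsis{})...)
				}
			}
		}
		return true
	}, nil)

	b, err := format.Node(f)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // the containers list now ends with ", ..."
}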
func openBaiscLit(root ast.Node) {
|
||||
|
||||
@@ -18,10 +18,9 @@ package sets
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue/format"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/format"
|
||||
"cuelang.org/go/cue/literal"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"github.com/pkg/errors"
|
||||
@@ -56,8 +55,8 @@ if true {
|
||||
}
|
||||
lacy: string
|
||||
`,
|
||||
expected: `foo: int
|
||||
lacy: string
|
||||
expected: `lacy: string
|
||||
foo: int
|
||||
`},
|
||||
{
|
||||
s: `
|
||||
@@ -72,11 +71,9 @@ if foo > 5 {
|
||||
}
|
||||
`},
|
||||
}
|
||||
var r cue.Runtime
|
||||
for _, tcase := range testCases {
|
||||
inst, err := r.Compile("-", tcase.s)
|
||||
assert.NilError(t, err)
|
||||
str, err := ToString(inst.Value())
|
||||
inst := cuecontext.New().CompileString(tcase.s)
|
||||
str, err := ToString(inst)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, str, tcase.expected)
|
||||
}
|
||||
@@ -122,16 +119,17 @@ abc
|
||||
`,
|
||||
expected: `foo: int
|
||||
lacy: """
|
||||
abc
|
||||
123
|
||||
"""
|
||||
abc
|
||||
123
|
||||
"""
|
||||
`},
|
||||
}
|
||||
|
||||
var r cue.Runtime
|
||||
var r = cuecontext.New()
|
||||
for _, tcase := range testCases {
|
||||
inst, err := r.Compile("-", tcase.s)
|
||||
file, err := parser.ParseFile("-", tcase.s)
|
||||
assert.NilError(t, err)
|
||||
inst := r.BuildFile(file)
|
||||
str, err := ToString(inst.Value(), OptBytesToString)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, str, tcase.expected)
|
||||
@@ -227,21 +225,20 @@ wait: {
|
||||
},
|
||||
}
|
||||
|
||||
var r cue.Runtime
|
||||
var r = cuecontext.New()
|
||||
for _, tCase := range testCases {
|
||||
f, err := parser.ParseFile("-", tCase.src)
|
||||
assert.NilError(t, err)
|
||||
err = PreprocessBuiltinFunc(f, "script", doScript)
|
||||
assert.NilError(t, err)
|
||||
inst, err := r.CompileFile(f)
|
||||
assert.NilError(t, err)
|
||||
inst := r.BuildFile(f)
|
||||
bt, _ := inst.Value().MarshalJSON()
|
||||
assert.Equal(t, string(bt), tCase.expectJson)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOpenBasicLit(t *testing.T) {
|
||||
s, err := OpenBaiscLit(`
|
||||
f, err := OpenBaiscLit(cuecontext.New().CompileString(`
|
||||
a: 10
|
||||
a1: int
|
||||
b: "foo"
|
||||
@@ -251,7 +248,10 @@ c1: bool
|
||||
arr: [1,2]
|
||||
top: _
|
||||
bottom: _|_
|
||||
`)
|
||||
`))
|
||||
assert.NilError(t, err)
|
||||
val := cuecontext.New().BuildFile(f)
|
||||
s, err := toString(val)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, s, `a: *10 | _
|
||||
a1: int
|
||||
@@ -261,7 +261,7 @@ c: *true | _
|
||||
c1: bool
|
||||
arr: *[1, 2] | [...]
|
||||
top: _
|
||||
bottom: _|_
|
||||
bottom: _|_ // explicit error (_|_ literal) in source
|
||||
`)
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/token"
|
||||
)
|
||||
|
||||
type nodewalker struct {
|
||||
@@ -83,6 +84,9 @@ func (nwk *nodewalker) walk(node ast.Node) {
|
||||
nwk.walk(n.X)
|
||||
nwk.walk(n.Y)
|
||||
|
||||
case *ast.UnaryExpr:
|
||||
nwk.walk(n.X)
|
||||
|
||||
case *ast.EmbedDecl:
|
||||
nwk.walk(n.Expr)
|
||||
|
||||
@@ -93,8 +97,10 @@ func (nwk *nodewalker) walk(node ast.Node) {
|
||||
case *ast.File:
|
||||
nwk.walkDeclList(n.Decls)
|
||||
|
||||
case *ast.ListComprehension:
|
||||
nwk.walk(n.Expr)
|
||||
case *ast.SliceExpr:
|
||||
if list, ok := n.X.(*ast.ListLit); ok {
|
||||
nwk.walkExprSlice(list.Elts, n.Low, n.High)
|
||||
}
|
||||
|
||||
case *ast.CallExpr:
|
||||
// close func need to be ignored
|
||||
@@ -119,6 +125,28 @@ func (nwk *nodewalker) walkExprList(list []ast.Expr) {
|
||||
}
|
||||
}
|
||||
|
||||
func (nwk *nodewalker) walkExprSlice(list []ast.Expr, low ast.Expr, high ast.Expr) {
|
||||
var (
|
||||
lowIndex = 0
|
||||
highIndex = len(list)
|
||||
)
|
||||
if v, ok := low.(*ast.BasicLit); ok && v.Kind == token.INT {
|
||||
lowIndex, _ = strconv.Atoi(v.Value)
|
||||
}
|
||||
if v, ok := high.(*ast.BasicLit); ok && v.Kind == token.INT {
|
||||
highIndex, _ = strconv.Atoi(v.Value)
|
||||
}
|
||||
for i, x := range list {
|
||||
if i < lowIndex || i >= highIndex {
|
||||
continue
|
||||
}
|
||||
origin := nwk.pos
|
||||
nwk.pos = append(nwk.pos, strconv.Itoa(i-lowIndex))
|
||||
nwk.walk(x)
|
||||
nwk.pos = origin
|
||||
}
|
||||
}
|
||||
|
||||
func (nwk *nodewalker) walkDeclList(list []ast.Decl) {
|
||||
for _, x := range list {
|
||||
nwk.walk(x)
|
||||
|
||||
@@ -19,10 +19,9 @@ package sets
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"github.com/bmizerany/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
@@ -87,25 +86,16 @@ func TestWalk(t *testing.T) {
|
||||
tags_str: strings.Compare(b,"c")
|
||||
}
|
||||
`,
|
||||
`a: [1, 2, 3]`,
|
||||
}
|
||||
|
||||
for _, src := range testCases {
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", src)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
re := require.New(t)
|
||||
inst := cuecontext.New().CompileString(src)
|
||||
nsrc, err := toString(inst.Value())
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
re.NoError(err)
|
||||
f, err := parser.ParseFile("-", nsrc)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
re.NoError(err)
|
||||
|
||||
newWalker(func(node ast.Node, ctx walkCtx) {
|
||||
if len(ctx.Pos()) == 0 {
|
||||
@@ -120,11 +110,9 @@ func TestWalk(t *testing.T) {
|
||||
}
|
||||
|
||||
n, err := lookUp(f, ctx.Pos()...)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
re.NoError(err)
|
||||
|
||||
assert.Equal(t, n, node, nsrc)
|
||||
re.Equal(n, node, nsrc)
|
||||
}).walk(f)
|
||||
}
|
||||
|
||||
@@ -144,10 +132,8 @@ func TestRemoveTmpVar(t *testing.T) {
|
||||
}
|
||||
`
|
||||
r := require.New(t)
|
||||
var runtime cue.Runtime
|
||||
inst, err := runtime.Compile("-", src)
|
||||
r.NoError(err)
|
||||
s, err := toString(inst.Value(), removeTmpVar)
|
||||
v := cuecontext.New().CompileString(src)
|
||||
s, err := toString(v, removeTmpVar)
|
||||
r.NoError(err)
|
||||
r.Equal(`spec: {
|
||||
list: [{
|
||||
|
||||
@@ -26,6 +26,7 @@ import (
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/literal"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"cuelang.org/go/cue/token"
|
||||
@@ -42,7 +43,7 @@ const DefaultPackageHeader = "package main\n"
|
||||
// Value is an object with cue.runtime and vendors
|
||||
type Value struct {
|
||||
v cue.Value
|
||||
r cue.Runtime
|
||||
r *cue.Context
|
||||
addImports func(instance *build.Instance) error
|
||||
}
|
||||
|
||||
@@ -58,6 +59,9 @@ func (val *Value) Error() error {
|
||||
if !v.Exists() {
|
||||
return errors.New("empty value")
|
||||
}
|
||||
if err := val.v.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
var gerr error
|
||||
v.Walk(func(value cue.Value) bool {
|
||||
if err := value.Eval().Err(); err != nil {
|
||||
@@ -156,18 +160,31 @@ func newValue(builder *build.Instance, pd *packages.PackageDiscover, tagTempl st
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var r cue.Runtime
|
||||
inst, err := r.Build(builder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := cuecontext.New()
|
||||
inst := r.BuildInstance(builder)
|
||||
val := new(Value)
|
||||
val.r = r
|
||||
val.v = inst.Value()
|
||||
val.v = inst
|
||||
val.addImports = addImports
|
||||
// do not check val.Err() error here, because the value may be filled later
|
||||
return val, nil
|
||||
}
|
||||
|
||||
// AddFile add file to the instance
|
||||
func AddFile(bi *build.Instance, filename string, src interface{}) error {
|
||||
if filename == "" {
|
||||
filename = "-"
|
||||
}
|
||||
file, err := parser.ParseFile(filename, src, parser.ParseComments)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := bi.AddSyntax(file); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
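
A hedged sketch of how the new value.AddFile helper is intended to be used when assembling a build instance by hand. The template and parameter strings are illustrative, and a bare *cue.Context build stands in here for PackageDiscover.ImportPackagesAndBuildValue:

package main

import (
	"fmt"

	"cuelang.org/go/cue/build"
	"cuelang.org/go/cue/cuecontext"

	"github.com/oam-dev/kubevela/pkg/cue/model/value"
)

func main() {
	bi := build.NewContext().NewInstance("", nil)

	// Files are parsed with comments preserved and added as syntax up front,
	// so parse errors surface before the instance is built.
	if err := value.AddFile(bi, "-", `output: image: parameter.image`); err != nil {
		panic(err)
	}
	if err := value.AddFile(bi, "parameter", `parameter: image: "nginx"`); err != nil {
		panic(err)
	}

	v := cuecontext.New().BuildInstance(bi)
	s, err := v.LookupPath(value.FieldPath("output.image")).String()
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // nginx
}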
// TagFieldOrder add step tag.
|
||||
func TagFieldOrder(root *ast.File) error {
|
||||
i := 0
|
||||
@@ -241,51 +258,66 @@ func (vs *visitor) addAttrForExpr(node ast.Node, index *int) {
|
||||
// MakeValue generate an value with same runtime
|
||||
func (val *Value) MakeValue(s string) (*Value, error) {
|
||||
builder := &build.Instance{}
|
||||
if err := builder.AddFile("-", s); err != nil {
|
||||
file, err := parser.ParseFile("-", s, parser.ParseComments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := builder.AddSyntax(file); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := val.addImports(builder); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
inst, err := val.r.Build(builder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
inst := val.r.BuildInstance(builder)
|
||||
v := new(Value)
|
||||
v.r = val.r
|
||||
v.v = inst.Value()
|
||||
v.v = inst
|
||||
v.addImports = val.addImports
|
||||
if v.Error() != nil {
|
||||
return nil, v.Error()
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func (val *Value) makeValueWithFile(files ...*ast.File) (*Value, error) {
|
||||
builder := &build.Instance{}
|
||||
newFile := &ast.File{}
|
||||
imports := map[string]*ast.ImportSpec{}
|
||||
for _, f := range files {
|
||||
if err := builder.AddSyntax(f); err != nil {
|
||||
return nil, err
|
||||
for _, importSpec := range f.Imports {
|
||||
if _, ok := imports[importSpec.Name.String()]; !ok {
|
||||
imports[importSpec.Name.String()] = importSpec
|
||||
}
|
||||
}
|
||||
newFile.Decls = append(newFile.Decls, f.Decls...)
|
||||
}
|
||||
|
||||
for _, imp := range imports {
|
||||
newFile.Imports = append(newFile.Imports, imp)
|
||||
}
|
||||
|
||||
if err := builder.AddSyntax(newFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := val.addImports(builder); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
inst, err := val.r.Build(builder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
inst := val.r.BuildInstance(builder)
|
||||
v := new(Value)
|
||||
v.r = val.r
|
||||
v.v = inst.Value()
|
||||
v.v = inst
|
||||
v.addImports = val.addImports
|
||||
return v, nil
|
||||
}
|
||||
|
||||
// FillRaw unify the value with the cue format string x at the given path.
|
||||
func (val *Value) FillRaw(x string, paths ...string) error {
|
||||
xInst, err := val.r.Compile("-", x)
|
||||
file, err := parser.ParseFile("-", x, parser.ParseComments)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v := val.v.Fill(xInst.Value(), paths...)
|
||||
xInst := val.r.BuildFile(file)
|
||||
v := val.v.FillPath(FieldPath(paths...), xInst)
|
||||
if v.Err() != nil {
|
||||
return v.Err()
|
||||
}
|
||||
@@ -296,7 +328,12 @@ func (val *Value) FillRaw(x string, paths ...string) error {
|
||||
// FillValueByScript unify the value x at the given script path.
|
||||
func (val *Value) FillValueByScript(x *Value, path string) error {
|
||||
if !strings.Contains(path, "[") {
|
||||
return val.FillObject(x, strings.Split(path, ".")...)
|
||||
newV := val.v.FillPath(FieldPath(path), x.v)
|
||||
if err := newV.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
val.v = newV
|
||||
return nil
|
||||
}
|
||||
s, err := x.String()
|
||||
if err != nil {
|
||||
@@ -343,19 +380,17 @@ func (val *Value) FillObject(x interface{}, paths ...string) error {
|
||||
}
|
||||
insert = v.v
|
||||
}
|
||||
newV := val.v.Fill(insert, paths...)
|
||||
if newV.Err() != nil {
|
||||
return newV.Err()
|
||||
}
|
||||
newV := val.v.FillPath(FieldPath(paths...), insert)
|
||||
// do not check newV.Err() error here, because the value may be filled later
|
||||
val.v = newV
|
||||
return nil
|
||||
}
|
||||
|
||||
// LookupValue reports the value at a path starting from val
|
||||
func (val *Value) LookupValue(paths ...string) (*Value, error) {
|
||||
v := val.v.Lookup(paths...)
|
||||
v := val.v.LookupPath(FieldPath(paths...))
|
||||
if !v.Exists() {
|
||||
return nil, errors.Errorf("var(path=%s) not exist", strings.Join(paths, "."))
|
||||
return nil, errors.Errorf("failed to lookup value: var(path=%s) not exist", strings.Join(paths, "."))
|
||||
}
|
||||
return &Value{
|
||||
v: v,
|
||||
@@ -364,6 +399,33 @@ func (val *Value) LookupValue(paths ...string) (*Value, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
func isScript(content string) (bool, error) {
|
||||
content = strings.TrimSpace(content)
|
||||
scriptFile, err := parser.ParseFile("-", content, parser.ParseComments)
|
||||
if err != nil {
|
||||
return false, errors.WithMessage(err, "parse script")
|
||||
}
|
||||
if len(scriptFile.Imports) != 0 {
|
||||
return true, nil
|
||||
}
|
||||
if len(scriptFile.Decls) == 0 || len(scriptFile.Decls) > 1 {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return !isSelector(scriptFile.Decls[0]), nil
|
||||
}
|
||||
|
||||
func isSelector(node ast.Node) bool {
|
||||
switch v := node.(type) {
|
||||
case *ast.EmbedDecl:
|
||||
return isSelector(v.Expr)
|
||||
case *ast.SelectorExpr, *ast.IndexExpr, *ast.Ident:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
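
isScript distinguishes a bare selector path from a full CUE script so LookupByScript can fall back to LookupValue. A simplified rerun of that check; it folds isSelector into a type switch and drops the error return:

package main

import (
	"fmt"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/parser"
)

// A single embedded selector/identifier is treated as a path; anything else
// (multiple declarations, imports, fields) is treated as a script.
func isScript(content string) bool {
	f, err := parser.ParseFile("-", content, parser.ParseComments)
	if err != nil || len(f.Imports) != 0 || len(f.Decls) != 1 {
		return true
	}
	embed, ok := f.Decls[0].(*ast.EmbedDecl)
	if !ok {
		return true
	}
	switch embed.Expr.(type) {
	case *ast.SelectorExpr, *ast.IndexExpr, *ast.Ident:
		return false
	default:
		return true
	}
}

func main() {
	fmt.Println(isScript("metadata.name"))              // false: plain path
	fmt.Println(isScript(`out: metadata.name + "-x"`)) // true: needs evaluation
}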
// LookupByScript reports the value by cue script.
|
||||
func (val *Value) LookupByScript(script string) (*Value, error) {
|
||||
var outputKey = "zz_output__"
|
||||
@@ -372,6 +434,14 @@ func (val *Value) LookupByScript(script string) (*Value, error) {
|
||||
if err != nil {
|
||||
return nil, errors.WithMessage(err, "parse script")
|
||||
}
|
||||
isScriptPath, err := isScript(script)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !isScriptPath {
|
||||
return val.LookupValue(script)
|
||||
}
|
||||
|
||||
raw, err := val.String()
|
||||
if err != nil {
|
||||
@@ -392,6 +462,7 @@ func (val *Value) LookupByScript(script string) (*Value, error) {
|
||||
|
||||
return newV.LookupValue(outputKey)
|
||||
}
|
||||
|
||||
func behindKey(file *ast.File, key string) {
|
||||
var (
|
||||
implDecls []ast.Decl
|
||||
@@ -456,68 +527,106 @@ func (val *Value) StepByList(handle func(name string, in *Value) (bool, error))
|
||||
|
||||
// StepByFields process the fields in order
|
||||
func (val *Value) StepByFields(handle func(name string, in *Value) (bool, error)) error {
|
||||
for i := 0; ; i++ {
|
||||
field, end, err := val.fieldIndex(i)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
iter := steps(val)
|
||||
for iter.next() {
|
||||
iter.do(handle)
|
||||
}
|
||||
return iter.err
|
||||
}
|
||||
|
||||
if end {
|
||||
return nil
|
||||
}
|
||||
stop, err := handle(field.Name, field.Value)
|
||||
if err != nil {
|
||||
return errors.WithMessagef(err, "step %s", field.Name)
|
||||
}
|
||||
if !isDef(field.Name) {
|
||||
if err := val.FillObject(field.Value, field.Name); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if stop {
|
||||
return nil
|
||||
}
|
||||
type stepsIterator struct {
|
||||
queue []*field
|
||||
index int
|
||||
target *Value
|
||||
err error
|
||||
stopped bool
|
||||
}
|
||||
|
||||
func steps(v *Value) *stepsIterator {
|
||||
return &stepsIterator{
|
||||
target: v,
|
||||
}
|
||||
}
|
||||
|
||||
func (val *Value) fieldIndex(index int) (*field, bool, error) {
|
||||
fields, err := val.fields()
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
func (iter *stepsIterator) next() bool {
|
||||
if iter.stopped {
|
||||
return false
|
||||
}
|
||||
if index >= len(fields) {
|
||||
return nil, true, nil
|
||||
if iter.err != nil {
|
||||
return false
|
||||
}
|
||||
return fields[index], false, nil
|
||||
if iter.queue != nil {
|
||||
iter.index++
|
||||
}
|
||||
iter.assemble()
|
||||
return iter.index <= len(iter.queue)-1
|
||||
}
|
||||
|
||||
func (val *Value) fields() ([]*field, error) {
|
||||
st, err := val.v.Struct()
|
||||
func (iter *stepsIterator) assemble() {
|
||||
st, err := iter.target.v.Struct()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
iter.err = err
|
||||
return
|
||||
}
|
||||
var fields []*field
|
||||
|
||||
filters := map[string]struct{}{}
|
||||
for _, item := range iter.queue {
|
||||
filters[item.Name] = struct{}{}
|
||||
}
|
||||
var addFields []*field
|
||||
for i := 0; i < st.Len(); i++ {
|
||||
v := st.Field(i)
|
||||
attr := v.Value.Attribute("step")
|
||||
name := st.Field(i).Name
|
||||
attr := st.Field(i).Value.Attribute("step")
|
||||
no, err := attr.Int(0)
|
||||
if err != nil {
|
||||
no = 100
|
||||
if v.Name == "#do" || v.Name == "#provider" {
|
||||
if name == "#do" || name == "#provider" {
|
||||
no = 0
|
||||
}
|
||||
}
|
||||
fields = append(fields, &field{
|
||||
no: no,
|
||||
Name: v.Name,
|
||||
Value: &Value{
|
||||
r: val.r,
|
||||
v: v.Value,
|
||||
addImports: val.addImports,
|
||||
}})
|
||||
if _, ok := filters[name]; !ok {
|
||||
addFields = append(addFields, &field{
|
||||
Name: name,
|
||||
no: no,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
suffixItems := append(addFields, iter.queue[iter.index:]...)
|
||||
sort.Sort(sortFields(suffixItems))
|
||||
iter.queue = append(iter.queue[:iter.index], suffixItems...)
|
||||
}
|
||||
|
||||
func (iter *stepsIterator) value() *Value {
|
||||
v := iter.target.v.LookupPath(FieldPath(iter.name()))
|
||||
return &Value{
|
||||
r: iter.target.r,
|
||||
v: v,
|
||||
addImports: iter.target.addImports,
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *stepsIterator) name() string {
|
||||
return iter.queue[iter.index].Name
|
||||
}
|
||||
|
||||
func (iter *stepsIterator) do(handle func(name string, in *Value) (bool, error)) {
|
||||
if iter.err != nil {
|
||||
return
|
||||
}
|
||||
v := iter.value()
|
||||
stopped, err := handle(iter.name(), v)
|
||||
if err != nil {
|
||||
iter.err = err
|
||||
return
|
||||
}
|
||||
iter.stopped = stopped
|
||||
if !isDef(iter.name()) {
|
||||
if err := iter.target.FillObject(v, iter.name()); err != nil {
|
||||
iter.err = err
|
||||
return
|
||||
}
|
||||
}
|
||||
sort.Sort(sortFields(fields))
|
||||
return fields, nil
|
||||
}
|
||||
|
||||
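
StepByFields keeps its external contract while the iterator handles ordering and fill-back internally. A hedged usage sketch of the public method, assuming (as the unit tests appear to) that a nil PackageDiscover is acceptable for value.NewValue; the step document is illustrative:

package main

import (
	"fmt"

	"github.com/oam-dev/kubevela/pkg/cue/model/value"
)

func main() {
	v, err := value.NewValue(`
step1: {value: 10}
step2: {prefix: step1.value, value: 20}
`, nil, "")
	if err != nil {
		panic(err)
	}

	// The handler runs once per field in step order; returning true stops
	// the iteration, and non-definition results are filled back.
	err = v.StepByFields(func(name string, in *value.Value) (bool, error) {
		fmt.Println("step:", name)
		return false, nil
	})
	if err != nil {
		panic(err)
	}
}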
type sortFields []*field
|
||||
@@ -535,13 +644,7 @@ func (sf sortFields) Swap(i, j int) {
|
||||
|
||||
// Field return the cue value corresponding to the specified field
|
||||
func (val *Value) Field(label string) (cue.Value, error) {
|
||||
var v cue.Value
|
||||
if isDef(label) {
|
||||
v = val.v.LookupDef(label)
|
||||
} else {
|
||||
v = val.v.Lookup(label)
|
||||
}
|
||||
|
||||
v := val.v.LookupPath(cue.ParsePath(label))
|
||||
if !v.Exists() {
|
||||
return v, errors.Errorf("label %s not found", label)
|
||||
}
|
||||
@@ -592,19 +695,13 @@ func (val *Value) GetBool(paths ...string) (bool, error) {
|
||||
|
||||
// OpenCompleteValue make that the complete value can be modified.
|
||||
func (val *Value) OpenCompleteValue() error {
|
||||
s, err := val.String()
|
||||
newS, err := sets.OpenBaiscLit(val.CueValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newS, err := sets.OpenBaiscLit(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v, err := val.MakeValue(newS)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
val.v = v.CueValue()
|
||||
|
||||
v := cuecontext.New().BuildFile(newS)
|
||||
val.v = v
|
||||
return nil
|
||||
}
|
||||
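Not part of the commit: a minimal standalone sketch of the parse-then-build pattern the rewritten OpenCompleteValue (and several later hunks) rely on, using only upstream cuelang.org/go APIs.

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
	"cuelang.org/go/cue/parser"
)

func main() {
	// Parse CUE source into an *ast.File, then build it with a cue.Context.
	// This replaces the deprecated cue.Runtime Compile/Build calls.
	f, err := parser.ParseFile("-", `x: 1`)
	if err != nil {
		panic(err)
	}
	v := cuecontext.New().BuildFile(f)
	if v.Err() != nil {
		panic(v.Err())
	}
	n, _ := v.LookupPath(cue.ParsePath("x")).Int64()
	fmt.Println(n) // 1
}
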
func isDef(s string) bool {
|
||||
@@ -645,12 +742,17 @@ func (a *assembler) installTo(expr ast.Expr) error {
|
||||
return err
|
||||
}
|
||||
case *ast.SelectorExpr:
|
||||
if err := a.installTo(v.Sel); err != nil {
|
||||
return err
|
||||
if ident, ok := v.Sel.(*ast.Ident); ok {
|
||||
if err := a.installTo(ident); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
return errors.New("invalid sel type in selector")
|
||||
}
|
||||
if err := a.installTo(v.X); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case *ast.Ident:
|
||||
a.fill2Path(v.String())
|
||||
case *ast.BasicLit:
|
||||
@@ -668,3 +770,40 @@ func (a *assembler) installTo(expr ast.Expr) error {
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// makePath creates a Path from a sequence of string.
|
||||
func makePath(paths ...string) string {
|
||||
mergedPath := ""
|
||||
if len(paths) == 0 {
|
||||
return mergedPath
|
||||
}
|
||||
mergedPath = paths[0]
|
||||
if mergedPath == "" || (len(paths) == 1 && (strings.Contains(mergedPath, ".") || strings.Contains(mergedPath, "["))) {
|
||||
return paths[0]
|
||||
}
|
||||
if !strings.HasPrefix(mergedPath, "_") && !strings.HasPrefix(mergedPath, "#") {
|
||||
mergedPath = fmt.Sprintf("\"%s\"", mergedPath)
|
||||
}
|
||||
for _, p := range paths[1:] {
|
||||
if !strings.HasPrefix(p, "#") {
|
||||
mergedPath += fmt.Sprintf("[\"%s\"]", p)
|
||||
} else {
|
||||
mergedPath += fmt.Sprintf(".%s", p)
|
||||
}
|
||||
}
|
||||
return mergedPath
|
||||
}
|
||||
|
||||
func isNumber(s string) bool {
|
||||
_, err := strconv.ParseInt(s, 10, 64)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// FieldPath return the cue path of the given paths
|
||||
func FieldPath(paths ...string) cue.Path {
|
||||
s := makePath(paths...)
|
||||
if isNumber(s) {
|
||||
return cue.MakePath(cue.Str(s))
|
||||
}
|
||||
return cue.ParsePath(s)
|
||||
}
|
||||
|
||||
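A minimal sketch (not from the commit) of the cue.Path behavior that the makePath/FieldPath helpers above are built around: field names that are not valid identifiers have to be quoted and indexed, while definitions keep their leading "#".

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	v := cuecontext.New().CompileString(`
"a-b": {"b-c": "v"}
#x: #y: "w"
`)
	// Non-identifier names use quoted index syntax, which is what
	// makePath produces for them.
	s, _ := v.LookupPath(cue.ParsePath(`"a-b"["b-c"]`)).String()
	fmt.Println(s) // v

	// Definitions keep their "#" prefix and use dotted selectors.
	w, _ := v.LookupPath(cue.ParsePath("#x.#y")).String()
	fmt.Println(w) // w
}
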
@@ -18,9 +18,13 @@ package value
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/sets"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gotest.tools/assert"
|
||||
)
|
||||
|
||||
@@ -99,14 +103,14 @@ step3: {
|
||||
prefix: 101
|
||||
value: 102
|
||||
}
|
||||
step4: {
|
||||
prefix: 102
|
||||
value: 103
|
||||
}
|
||||
step5: {
|
||||
prefix: 103
|
||||
value: 104
|
||||
}
|
||||
step4: {
|
||||
prefix: 102
|
||||
value: 103
|
||||
}
|
||||
`},
|
||||
|
||||
{base: `
|
||||
@@ -202,6 +206,8 @@ step3: "3"
|
||||
}
|
||||
|
||||
func TestStepWithTag(t *testing.T) {
|
||||
// TODO(@FogDong): add if condition test cases back.
|
||||
// refer to issue: https://github.com/cue-lang/cue/issues/1826
|
||||
testCases := []struct {
|
||||
base string
|
||||
expected string
|
||||
@@ -211,9 +217,6 @@ step1: {}
|
||||
step2: {prefix: step1.value}
|
||||
step3: {prefix: step2.value}
|
||||
step4: {prefix: step3.value}
|
||||
if step4.value > 100 {
|
||||
step5: {}
|
||||
}
|
||||
step5: {
|
||||
value: *100|int
|
||||
}
|
||||
@@ -239,9 +242,7 @@ step5: {
|
||||
`}, {base: `
|
||||
step1: {}
|
||||
step2: {prefix: step1.value}
|
||||
if step2.value > 100 {
|
||||
step2_3: {prefix: step2.value}
|
||||
}
|
||||
step2_3: {prefix: step2.value}
|
||||
step3: {prefix: step2.value}
|
||||
step4: {prefix: step3.value}
|
||||
`,
|
||||
@@ -268,9 +269,7 @@ step4: {
|
||||
step2: {prefix: step1.value} @step(2)
|
||||
step1: {} @step(1)
|
||||
step3: {prefix: step2.value} @step(4)
|
||||
if step2.value > 100 {
|
||||
step2_3: {prefix: step2.value} @step(3)
|
||||
}
|
||||
step2_3: {prefix: step2.value} @step(3)
|
||||
`,
|
||||
expected: `step2: {
|
||||
prefix: 100
|
||||
@@ -292,9 +291,7 @@ step2_3: {
|
||||
{base: `
|
||||
step2: {prefix: step1.value}
|
||||
step1: {} @step(-1)
|
||||
if step2.value > 100 {
|
||||
step2_3: {prefix: step2.value}
|
||||
}
|
||||
step2_3: {prefix: step2.value}
|
||||
step3: {prefix: step2.value}
|
||||
`,
|
||||
expected: `step2: {
|
||||
@@ -314,9 +311,10 @@ step3: {
|
||||
} @step(3)
|
||||
`}}
|
||||
|
||||
for _, tCase := range testCases {
|
||||
for i, tCase := range testCases {
|
||||
r := require.New(t)
|
||||
val, err := NewValue(tCase.base, nil, "", TagFieldOrder)
|
||||
assert.NilError(t, err)
|
||||
r.NoError(err)
|
||||
number := 99
|
||||
err = val.StepByFields(func(name string, in *Value) (bool, error) {
|
||||
number++
|
||||
@@ -324,10 +322,10 @@ step3: {
|
||||
"value": number,
|
||||
})
|
||||
})
|
||||
assert.NilError(t, err)
|
||||
str, err := val.String()
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, str, tCase.expected)
|
||||
r.NoError(err)
|
||||
str, err := sets.ToString(val.CueValue())
|
||||
r.NoError(err)
|
||||
r.Equal(str, tCase.expected, fmt.Sprintf("testPatch for case(no:%d) %s", i, str))
|
||||
}
|
||||
}
|
||||
|
||||
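The ordering asserted in the test above can be pictured with a short, hypothetical caller sketch; the import path and the NewValue, StepByFields, and FillObject signatures are assumed from the tests in this diff rather than verified elsewhere.

package main

import (
	"fmt"

	"github.com/oam-dev/kubevela/pkg/cue/model/value"
)

func main() {
	// With TagFieldOrder, fields are visited by their @step tag,
	// so step1 runs before step2 even though it is declared second.
	v, err := value.NewValue(`
step2: {prefix: step1.value} @step(2)
step1: {}                    @step(1)
`, nil, "", value.TagFieldOrder)
	if err != nil {
		panic(err)
	}
	n := 0
	err = v.StepByFields(func(name string, in *value.Value) (bool, error) {
		n++
		fmt.Println(name) // step1, then step2
		return false, in.FillObject(map[string]interface{}{"value": n})
	})
	if err != nil {
		panic(err)
	}
}
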
@@ -371,7 +369,7 @@ func TestStepByList(t *testing.T) {
|
||||
var i int64
|
||||
err = v.StepByList(func(name string, in *Value) (bool, error) {
|
||||
i++
|
||||
num, err := in.CueValue().Lookup("step").Int64()
|
||||
num, err := in.CueValue().LookupPath(FieldPath("step")).Int64()
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, num, i)
|
||||
return false, nil
|
||||
@@ -409,8 +407,8 @@ func TestValue(t *testing.T) {
|
||||
provider: xxx
|
||||
`
|
||||
val, err := NewValue(caseError, nil, "")
|
||||
assert.Equal(t, err != nil, true)
|
||||
assert.Equal(t, val == nil, true)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, val.Error() != nil, true)
|
||||
|
||||
val, err = NewValue(":", nil, "")
|
||||
assert.Equal(t, err != nil, true)
|
||||
@@ -429,6 +427,8 @@ do: "apply"
|
||||
assert.Equal(t, err != nil, true)
|
||||
_, err = val.MakeValue(":")
|
||||
assert.Equal(t, err != nil, true)
|
||||
_, err = val.MakeValue("test: _|_")
|
||||
assert.Equal(t, err != nil, true)
|
||||
err = val.FillRaw(caseError)
|
||||
assert.Equal(t, err != nil, true)
|
||||
assert.Equal(t, originCue, val.CueValue())
|
||||
@@ -448,8 +448,8 @@ close({provider: int})
|
||||
cv, err = val.MakeValue(caseClose)
|
||||
assert.NilError(t, err)
|
||||
err = val.FillObject(cv)
|
||||
assert.Equal(t, err != nil, true)
|
||||
assert.Equal(t, originCue, val.CueValue())
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, val.Error() != nil, true)
|
||||
|
||||
_, err = val.LookupValue("abc")
|
||||
assert.Equal(t, err != nil, true)
|
||||
@@ -476,6 +476,84 @@ x: #X & {
|
||||
assert.NilError(t, err)
|
||||
}
|
||||
|
||||
func TestLookupValue(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
str string
|
||||
paths []string
|
||||
}{
|
||||
{
|
||||
name: "def",
|
||||
str: `
|
||||
#x: "v"
|
||||
`,
|
||||
paths: []string{"#x"},
|
||||
},
|
||||
{
|
||||
name: "def in def",
|
||||
str: `
|
||||
#x: {
|
||||
#y: "v"
|
||||
}
|
||||
`,
|
||||
paths: []string{"#x", "#y"},
|
||||
},
|
||||
{
|
||||
name: "num",
|
||||
str: `
|
||||
"1": {
|
||||
"2": "v"
|
||||
}
|
||||
`,
|
||||
paths: []string{"1", "2"},
|
||||
},
|
||||
{
|
||||
name: "invalid",
|
||||
str: `
|
||||
"a-b": {
|
||||
"b-c": "v"
|
||||
}
|
||||
`,
|
||||
paths: []string{"a-b", "b-c"},
|
||||
},
|
||||
{
|
||||
name: "concrete path",
|
||||
str: `
|
||||
a: {
|
||||
"b-c": "v"
|
||||
}
|
||||
`,
|
||||
paths: []string{`a["b-c"]`},
|
||||
},
|
||||
{
|
||||
name: "concrete path with num",
|
||||
str: `
|
||||
a: [
|
||||
{
|
||||
key: "v"
|
||||
}
|
||||
]
|
||||
`,
|
||||
paths: []string{`a[0].key`},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
r := require.New(t)
|
||||
v, err := NewValue(tc.str, nil, "")
|
||||
r.NoError(err)
|
||||
result, err := v.LookupValue(tc.paths...)
|
||||
r.NoError(err)
|
||||
r.NoError(result.Error())
|
||||
s, err := sets.ToString(result.v)
|
||||
r.Equal(s, `"v"
|
||||
`)
|
||||
r.NoError(err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestValueError(t *testing.T) {
|
||||
caseOk := `
|
||||
provider: "kube"
|
||||
@@ -485,8 +563,7 @@ do: "apply"
|
||||
assert.NilError(t, err)
|
||||
err = val.FillRaw(`
|
||||
provider: "conflict"`)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, val.Error() != nil, true)
|
||||
assert.Equal(t, err != nil, true)
|
||||
|
||||
val, err = NewValue(caseOk, nil, "")
|
||||
assert.NilError(t, err)
|
||||
@@ -658,7 +735,7 @@ strings.Join(apply.arr,".")+"$"`,
|
||||
{
|
||||
src: `
|
||||
op: string
|
||||
op: 12
|
||||
op: "help"
|
||||
`,
|
||||
script: `op(1`,
|
||||
err: "parse script: expected ')', found 'EOF'",
|
||||
@@ -669,7 +746,7 @@ strings.Join(apply.arr,".")+"$"`,
|
||||
op: "help"
|
||||
`,
|
||||
script: `oss`,
|
||||
err: "zz_output__: reference \"oss\" not found",
|
||||
err: "failed to lookup value: var(path=oss) not exist",
|
||||
},
|
||||
}
|
||||
|
||||
@@ -869,7 +946,7 @@ func TestFillByScript(t *testing.T) {
|
||||
raw: `a: b: [{x: y:[{name: "key"}]}]`,
|
||||
path: "a.b[0].x.y[0].name",
|
||||
v: `"foo"`,
|
||||
err: "a.b.0.x.y.0.name: conflicting values \"key\" and \"foo\"",
|
||||
err: "remake value: a.b.0.x.y.0.name: conflicting values \"foo\" and \"key\"",
|
||||
},
|
||||
{
|
||||
name: "filled value with wrong cue format",
|
||||
|
||||
@@ -45,6 +45,7 @@ import (
|
||||
"k8s.io/utils/pointer"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
"github.com/oam-dev/kubevela/pkg/oam/discoverymapper"
|
||||
"github.com/oam-dev/kubevela/pkg/oam/util"
|
||||
)
|
||||
@@ -99,7 +100,7 @@ var _ = Describe("Package discovery resources for definition from K8s APIServer"
|
||||
|
||||
By("test ingress in kube package")
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
err := bi.AddFile("-", `
|
||||
err := value.AddFile(bi, "-", `
|
||||
import (
|
||||
kube "kube/networking.k8s.io/v1beta1"
|
||||
)
|
||||
@@ -132,9 +133,9 @@ parameter: {
|
||||
}
|
||||
}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
inst, err := pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err := pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err := model.NewBase(inst.Lookup("output"))
|
||||
base, err := model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err := base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -158,7 +159,7 @@ parameter: {
|
||||
})).Should(BeEquivalentTo(""))
|
||||
By("test Invalid Import path")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
err = value.AddFile(bi, "-", `
|
||||
import (
|
||||
kube "kube/networking.k8s.io/v1"
|
||||
)
|
||||
@@ -179,15 +180,16 @@ parameter: {
|
||||
name: "myapp"
|
||||
image: "nginx"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
_, err = model.NewBase(inst.Lookup("output"))
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
_, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(Equal("_|_ // undefined field \"#Deployment\""))
|
||||
|
||||
By("test Deployment in kube package")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
err = value.AddFile(bi, "-", `
|
||||
import (
|
||||
kube "kube/apps/v1"
|
||||
)
|
||||
@@ -207,9 +209,10 @@ parameter: {
|
||||
name: "myapp"
|
||||
image: "nginx"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -229,7 +232,7 @@ parameter: {
|
||||
|
||||
By("test Secret in kube package")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
err = value.AddFile(bi, "-", `
|
||||
import (
|
||||
kube "kube/v1"
|
||||
)
|
||||
@@ -243,9 +246,10 @@ output: {
|
||||
parameter: {
|
||||
name: "myapp"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -257,7 +261,7 @@ parameter: {
|
||||
|
||||
By("test Service in kube package")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
err = value.AddFile(bi, "-", `
|
||||
import (
|
||||
kube "kube/v1"
|
||||
)
|
||||
@@ -271,9 +275,10 @@ output: {
|
||||
parameter: {
|
||||
name: "myapp"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -360,7 +365,7 @@ parameter: {
|
||||
}, time.Second*30, time.Millisecond*300).Should(BeNil())
|
||||
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
err = bi.AddFile("-", `
|
||||
err = value.AddFile(bi, "-", `
|
||||
import (
|
||||
kv1 "kube/example.com/v1"
|
||||
)
|
||||
@@ -371,9 +376,9 @@ output: {
|
||||
}
|
||||
`)
|
||||
Expect(err).Should(BeNil())
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -392,7 +397,7 @@ output: {
|
||||
|
||||
By("test ingress in kube package")
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
err := bi.AddFile("-", `
|
||||
err := value.AddFile(bi, "-", `
|
||||
import (
|
||||
network "k8s.io/networking/v1beta1"
|
||||
)
|
||||
@@ -423,9 +428,9 @@ parameter: {
|
||||
}
|
||||
}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
inst, err := pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err := pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err := model.NewBase(inst.Lookup("output"))
|
||||
base, err := model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err := base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -449,7 +454,7 @@ parameter: {
|
||||
})).Should(BeEquivalentTo(""))
|
||||
By("test Invalid Import path")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
value.AddFile(bi, "-", `
|
||||
import (
|
||||
"k8s.io/networking/v1"
|
||||
)
|
||||
@@ -470,15 +475,15 @@ parameter: {
|
||||
name: "myapp"
|
||||
image: "nginx"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
_, err = model.NewBase(inst.Lookup("output"))
|
||||
_, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(Equal("_|_ // undefined field \"#Deployment\""))
|
||||
|
||||
By("test Deployment in kube package")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
value.AddFile(bi, "-", `
|
||||
import (
|
||||
apps "k8s.io/apps/v1"
|
||||
)
|
||||
@@ -498,9 +503,9 @@ parameter: {
|
||||
name: "myapp"
|
||||
image: "nginx"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -520,7 +525,7 @@ parameter: {
|
||||
|
||||
By("test Secret in kube package")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
value.AddFile(bi, "-", `
|
||||
import (
|
||||
"k8s.io/core/v1"
|
||||
)
|
||||
@@ -534,9 +539,9 @@ output: {
|
||||
parameter: {
|
||||
name: "myapp"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -548,7 +553,7 @@ parameter: {
|
||||
|
||||
By("test Service in kube package")
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
value.AddFile(bi, "-", `
|
||||
import (
|
||||
"k8s.io/core/v1"
|
||||
)
|
||||
@@ -562,9 +567,9 @@ output: {
|
||||
parameter: {
|
||||
name: "myapp"
|
||||
}`)
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
@@ -651,7 +656,7 @@ parameter: {
|
||||
}, time.Second*30, time.Millisecond*300).Should(BeNil())
|
||||
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
err = bi.AddFile("-", `
|
||||
err = value.AddFile(bi, "-", `
|
||||
import (
|
||||
ev1 "example.com/v1"
|
||||
)
|
||||
@@ -662,9 +667,9 @@ output: {
|
||||
}
|
||||
`)
|
||||
Expect(err).Should(BeNil())
|
||||
inst, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
inst, err = pd.ImportPackagesAndBuildValue(bi)
|
||||
Expect(err).Should(BeNil())
|
||||
base, err = model.NewBase(inst.Lookup("output"))
|
||||
base, err = model.NewBase(inst.LookupPath(value.FieldPath("output")))
|
||||
Expect(err).Should(BeNil())
|
||||
data, err = base.Unstructured()
|
||||
Expect(err).Should(BeNil())
|
||||
|
||||
@@ -26,6 +26,8 @@ import (
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"cuelang.org/go/cue/token"
|
||||
"cuelang.org/go/encoding/jsonschema"
|
||||
"github.com/pkg/errors"
|
||||
@@ -106,6 +108,7 @@ func (pd *PackageDiscover) ImportBuiltinPackagesFor(bi *build.Instance) {
|
||||
}
|
||||
|
||||
// ImportPackagesAndBuildInstance Combine import built-in packages and build cue template together to avoid data race
|
||||
// nolint:staticcheck
|
||||
func (pd *PackageDiscover) ImportPackagesAndBuildInstance(bi *build.Instance) (inst *cue.Instance, err error) {
|
||||
var r cue.Runtime
|
||||
if pd == nil {
|
||||
@@ -120,6 +123,21 @@ func (pd *PackageDiscover) ImportPackagesAndBuildInstance(bi *build.Instance) (i
|
||||
return r.Build(bi)
|
||||
}
|
||||
|
||||
// ImportPackagesAndBuildValue Combine import built-in packages and build cue template together to avoid data race
|
||||
func (pd *PackageDiscover) ImportPackagesAndBuildValue(bi *build.Instance) (val cue.Value, err error) {
|
||||
cuectx := cuecontext.New()
|
||||
if pd == nil {
|
||||
return cuectx.BuildInstance(bi), nil
|
||||
}
|
||||
pd.ImportBuiltinPackagesFor(bi)
|
||||
if err := stdlib.AddImportsFor(bi, ""); err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
pd.mutex.Lock()
|
||||
defer pd.mutex.Unlock()
|
||||
return cuectx.BuildInstance(bi), nil
|
||||
}
|
||||
|
||||
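A hypothetical caller of the new ImportPackagesAndBuildValue API, assuming the pkg/cue/packages import path; the nil *PackageDiscover path shown here is the one the function itself short-circuits on.

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/build"
	"cuelang.org/go/cue/parser"

	"github.com/oam-dev/kubevela/pkg/cue/packages"
)

func main() {
	bi := build.NewContext().NewInstance("", nil)
	f, err := parser.ParseFile("-", `output: kind: "Demo"`)
	if err != nil {
		panic(err)
	}
	if err := bi.AddSyntax(f); err != nil {
		panic(err)
	}
	// A nil discover is tolerated: the instance is built with a fresh
	// cue.Context and no kube packages are imported.
	var pd *packages.PackageDiscover
	val, err := pd.ImportPackagesAndBuildValue(bi)
	if err != nil {
		panic(err)
	}
	kind, _ := val.LookupPath(cue.ParsePath("output.kind")).String()
	fmt.Println(kind) // Demo
}
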
// ListPackageKinds list packages and their kinds
|
||||
func (pd *PackageDiscover) ListPackageKinds() map[string][]VersionKind {
|
||||
pd.mutex.RLock()
|
||||
@@ -189,7 +207,11 @@ func (pd *PackageDiscover) pkgBuild(packages map[string]*pkgInstance, pkgName st
|
||||
DefinitionName: "#" + dGVK.Kind,
|
||||
})
|
||||
|
||||
if err := pkg.AddFile(dGVK.reverseString(), def); err != nil {
|
||||
file, err := parser.ParseFile(dGVK.reverseString(), def)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := pkg.AddSyntax(file); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -199,19 +221,21 @@ func (pd *PackageDiscover) pkgBuild(packages map[string]*pkgInstance, pkgName st
|
||||
}
|
||||
|
||||
func (pd *PackageDiscover) addKubeCUEPackagesFromCluster(apiSchema string) error {
|
||||
var r cue.Runtime
|
||||
oaInst, err := r.Compile("-", apiSchema)
|
||||
file, err := parser.ParseFile("-", apiSchema)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
oaInst := cuecontext.New().BuildFile(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dgvkMapper := make(map[string]domainGroupVersionKind)
|
||||
pathValue := oaInst.Value().Lookup("paths")
|
||||
pathValue := oaInst.LookupPath(cue.ParsePath("paths"))
|
||||
if pathValue.Exists() {
|
||||
if st, err := pathValue.Struct(); err == nil {
|
||||
iter := st.Fields()
|
||||
for iter.Next() {
|
||||
gvk := iter.Value().Lookup("post",
|
||||
"x-kubernetes-group-version-kind")
|
||||
gvk := iter.Value().LookupPath(cue.ParsePath("post[\"x-kubernetes-group-version-kind\"]"))
|
||||
if gvk.Exists() {
|
||||
if v, err := getDGVK(gvk); err == nil {
|
||||
dgvkMapper[v.reverseString()] = v
|
||||
@@ -398,16 +422,16 @@ func (pkg *pkgInstance) processOpenAPIFile(f *ast.File) {
|
||||
|
||||
func getDGVK(v cue.Value) (ret domainGroupVersionKind, err error) {
|
||||
gvk := metav1.GroupVersionKind{}
|
||||
gvk.Group, err = v.Lookup("group").String()
|
||||
gvk.Group, err = v.LookupPath(cue.ParsePath("group")).String()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
gvk.Version, err = v.Lookup("version").String()
|
||||
gvk.Version, err = v.LookupPath(cue.ParsePath("version")).String()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
gvk.Kind, err = v.Lookup("kind").String()
|
||||
gvk.Kind, err = v.LookupPath(cue.ParsePath("kind")).String()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -22,6 +22,7 @@ import (
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"cuelang.org/go/cue/token"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
@@ -347,12 +348,12 @@ func TestPackage(t *testing.T) {
|
||||
}
|
||||
assert.Equal(t, cmp.Diff(mypd.ListPackageKinds(), expectPkgKinds), "")
|
||||
|
||||
exceptObj := `output: close({
|
||||
kind: "Bucket"
|
||||
// TODO: fix losing close struct in cue
|
||||
exceptObj := `output: {
|
||||
apiVersion: "apps.test.io/v1"
|
||||
type: "alicloud_oss_bucket"
|
||||
acl: "public-read-write" | "public-read" | *"private"
|
||||
dataRedundancyType?: "ZRS" | *"LRS"
|
||||
kind: "Bucket"
|
||||
acl: *"private" | "public-read" | "public-read-write"
|
||||
dataRedundancyType?: "LRS" | "ZRS" | *"LRS"
|
||||
dataSourceRef?: {
|
||||
dsPath: string
|
||||
}
|
||||
@@ -360,12 +361,6 @@ func TestPackage(t *testing.T) {
|
||||
importKey: string
|
||||
}
|
||||
output: {
|
||||
{[!~"^(bucketName|extranetEndpoint|intranetEndpoint|masterUserId)$"]: {
|
||||
outRef: string
|
||||
} | {
|
||||
// Example: demoVpc.vpcId
|
||||
valueRef: string
|
||||
}}
|
||||
bucketName: {
|
||||
outRef: "self.name"
|
||||
}
|
||||
@@ -403,30 +398,41 @@ func TestPackage(t *testing.T) {
|
||||
valueRef: string
|
||||
}
|
||||
}
|
||||
storageClass?: "IA" | "Archive" | "ColdArchive" | *"Standard"
|
||||
})
|
||||
storageClass?: "Standard" | "IA" | "Archive" | "ColdArchive" | *"Standard"
|
||||
type: "alicloud_oss_bucket"
|
||||
}
|
||||
`
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
file, err := parser.ParseFile("-", `
|
||||
import "test.io/apps/v1"
|
||||
output: v1.#Bucket
|
||||
`)
|
||||
inst, err := mypd.ImportPackagesAndBuildInstance(bi)
|
||||
assert.NilError(t, err)
|
||||
base, err := model.NewBase(inst.Value())
|
||||
err = bi.AddSyntax(file)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, base.String(), exceptObj)
|
||||
inst, err := mypd.ImportPackagesAndBuildValue(bi)
|
||||
assert.NilError(t, err)
|
||||
base, err := model.NewBase(inst)
|
||||
assert.NilError(t, err)
|
||||
s, err := base.String()
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, s, exceptObj)
|
||||
|
||||
bi = build.NewContext().NewInstance("", nil)
|
||||
bi.AddFile("-", `
|
||||
file, err = parser.ParseFile("-", `
|
||||
import "kube/apps.test.io/v1"
|
||||
output: v1.#Bucket
|
||||
`)
|
||||
inst, err = mypd.ImportPackagesAndBuildInstance(bi)
|
||||
assert.NilError(t, err)
|
||||
base, err = model.NewBase(inst.Value())
|
||||
err = bi.AddSyntax(file)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, base.String(), exceptObj)
|
||||
inst, err = mypd.ImportPackagesAndBuildValue(bi)
|
||||
assert.NilError(t, err)
|
||||
base, err = model.NewBase(inst)
|
||||
assert.NilError(t, err)
|
||||
s, err = base.String()
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, s, exceptObj)
|
||||
}
|
||||
|
||||
func TestProcessFile(t *testing.T) {
|
||||
@@ -445,25 +451,22 @@ func TestProcessFile(t *testing.T) {
|
||||
assert.NilError(t, err)
|
||||
testPkg := newPackage("foo")
|
||||
testPkg.processOpenAPIFile(file)
|
||||
cuectx := cuecontext.New()
|
||||
inst := cuectx.BuildFile(file)
|
||||
|
||||
var r cue.Runtime
|
||||
inst, err := r.CompileFile(file)
|
||||
assert.NilError(t, err)
|
||||
testCasesInst, err := r.Compile("-", `
|
||||
#Definition: {}
|
||||
case1: #Definition & {additionalProperty: "test"}
|
||||
testCasesInst := cuectx.CompileString(`
|
||||
#Definition: {}
|
||||
case1: #Definition & {additionalProperty: "test"}
|
||||
|
||||
case2: #Definition & {
|
||||
metadata: {
|
||||
additionalProperty: "test"
|
||||
}
|
||||
case2: #Definition & {
|
||||
metadata: {
|
||||
additionalProperty: "test"
|
||||
}
|
||||
}
|
||||
`)
|
||||
assert.NilError(t, err)
|
||||
retInst, err := inst.Fill(testCasesInst.Value())
|
||||
assert.NilError(t, err)
|
||||
assert.Error(t, retInst.Lookup("case1").Err(), "case1: field \"additionalProperty\" not allowed in closed struct")
|
||||
assert.Error(t, retInst.Lookup("case2", "metadata").Err(), "case2.metadata: field \"additionalProperty\" not allowed in closed struct")
|
||||
retInst := inst.FillPath(cue.ParsePath(""), testCasesInst.Value())
|
||||
assert.Error(t, retInst.LookupPath(cue.ParsePath("case1")).Err(), "case1.additionalProperty: field not allowed")
|
||||
assert.Error(t, retInst.LookupPath(cue.ParsePath("case2.metadata")).Err(), "case2.metadata.additionalProperty: field not allowed")
|
||||
}
|
||||
|
||||
func TestMount(t *testing.T) {
|
||||
@@ -486,10 +489,10 @@ func TestGetDGVK(t *testing.T) {
|
||||
}
|
||||
}
|
||||
`
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", srcTmpl)
|
||||
file, err := parser.ParseFile("-", srcTmpl)
|
||||
assert.NilError(t, err)
|
||||
gvk, err := getDGVK(inst.Value().Lookup("x-kubernetes-group-version-kind"))
|
||||
inst := cuecontext.New().BuildFile(file)
|
||||
gvk, err := getDGVK(inst.Value().LookupPath(cue.ParsePath("\"x-kubernetes-group-version-kind\"")))
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, gvk, domainGroupVersionKind{
|
||||
Domain: "test.io",
|
||||
@@ -508,9 +511,8 @@ func TestGetDGVK(t *testing.T) {
|
||||
}
|
||||
}
|
||||
`
|
||||
inst, err = r.Compile("-", srcTmpl)
|
||||
assert.NilError(t, err)
|
||||
gvk, err = getDGVK(inst.Value().Lookup("x-kubernetes-group-version-kind"))
|
||||
inst = cuecontext.New().CompileString(srcTmpl)
|
||||
gvk, err = getDGVK(inst.LookupPath(cue.ParsePath("\"x-kubernetes-group-version-kind\"")))
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, gvk, domainGroupVersionKind{
|
||||
Group: "test.io",
|
||||
|
||||
@@ -201,7 +201,11 @@ func (ctx *templateContext) BaseContextFile() (string, error) {
|
||||
}
|
||||
|
||||
if ctx.base != nil {
|
||||
buff += fmt.Sprintf(model.OutputFieldName+": %s\n", structMarshal(ctx.base.String()))
|
||||
base, err := ctx.base.String()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
buff += fmt.Sprintf(model.OutputFieldName+": %s\n", structMarshal(base))
|
||||
}
|
||||
|
||||
if ctx.components != nil {
|
||||
@@ -215,7 +219,11 @@ func (ctx *templateContext) BaseContextFile() (string, error) {
|
||||
if len(ctx.auxiliaries) > 0 {
|
||||
var auxLines []string
|
||||
for _, auxiliary := range ctx.auxiliaries {
|
||||
auxLines = append(auxLines, fmt.Sprintf("\"%s\": %s", auxiliary.Name, structMarshal(auxiliary.Ins.String())))
|
||||
aux, err := auxiliary.Ins.String()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
auxLines = append(auxLines, fmt.Sprintf("\"%s\": %s", auxiliary.Name, structMarshal(aux)))
|
||||
}
|
||||
if len(auxLines) > 0 {
|
||||
buff += fmt.Sprintf(model.OutputsFieldName+": {%s}\n", strings.Join(auxLines, "\n"))
|
||||
|
||||
@@ -19,10 +19,11 @@ package process
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"github.com/bmizerany/assert"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
)
|
||||
|
||||
func TestContext(t *testing.T) {
|
||||
@@ -30,13 +31,8 @@ func TestContext(t *testing.T) {
|
||||
image: "myserver"
|
||||
`
|
||||
|
||||
var r cue.Runtime
|
||||
inst, err := r.Compile("-", baseTemplate)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
base, err := model.NewBase(inst.Value())
|
||||
inst := cuecontext.New().CompileString(baseTemplate)
|
||||
base, err := model.NewBase(inst)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
@@ -47,13 +43,9 @@ image: "myserver"
|
||||
kind: "ConfigMap"
|
||||
`
|
||||
|
||||
svcInst, err := r.Compile("-", serviceTemplate)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
svcInst := cuecontext.New().CompileString(serviceTemplate)
|
||||
|
||||
svcIns, err := model.NewOther(svcInst.Value())
|
||||
svcIns, err := model.NewOther(svcInst)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
@@ -120,61 +112,57 @@ image: "myserver"
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
ctxInst, err := r.Compile("-", c)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
ctxInst := cuecontext.New().CompileString(c)
|
||||
|
||||
gName, err := ctxInst.Lookup("context", model.ContextName).String()
|
||||
gName, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextName)).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "mycomp", gName)
|
||||
|
||||
myAppName, err := ctxInst.Lookup("context", model.ContextAppName).String()
|
||||
myAppName, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextAppName)).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "myapp", myAppName)
|
||||
|
||||
myAppRevision, err := ctxInst.Lookup("context", model.ContextAppRevision).String()
|
||||
myAppRevision, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextAppRevision)).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "myapp-v1", myAppRevision)
|
||||
|
||||
myAppRevisionNum, err := ctxInst.Lookup("context", model.ContextAppRevisionNum).Int64()
|
||||
myAppRevisionNum, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextAppRevisionNum)).Int64()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, int64(1), myAppRevisionNum)
|
||||
|
||||
myWorkflowName, err := ctxInst.Lookup("context", model.ContextWorkflowName).String()
|
||||
myWorkflowName, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextWorkflowName)).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "myworkflow", myWorkflowName)
|
||||
|
||||
myPublishVersion, err := ctxInst.Lookup("context", model.ContextPublishVersion).String()
|
||||
myPublishVersion, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextPublishVersion)).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "mypublishversion", myPublishVersion)
|
||||
|
||||
inputJs, err := ctxInst.Lookup("context", model.OutputFieldName).MarshalJSON()
|
||||
inputJs, err := ctxInst.LookupPath(value.FieldPath("context", model.OutputFieldName)).MarshalJSON()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, `{"image":"myserver"}`, string(inputJs))
|
||||
|
||||
outputsJs, err := ctxInst.Lookup("context", model.OutputsFieldName, "service").MarshalJSON()
|
||||
outputsJs, err := ctxInst.LookupPath(value.FieldPath("context", model.OutputsFieldName, "service")).MarshalJSON()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "{\"apiVersion\":\"v1\",\"kind\":\"ConfigMap\"}", string(outputsJs))
|
||||
|
||||
outputsJs, err = ctxInst.Lookup("context", model.OutputsFieldName, "service-1").MarshalJSON()
|
||||
outputsJs, err = ctxInst.LookupPath(value.FieldPath("context", model.OutputsFieldName, "service-1")).MarshalJSON()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "{\"apiVersion\":\"v1\",\"kind\":\"ConfigMap\"}", string(outputsJs))
|
||||
|
||||
ns, err := ctxInst.Lookup("context", model.ContextNamespace).String()
|
||||
ns, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextNamespace)).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "myns", ns)
|
||||
|
||||
params, err := ctxInst.Lookup("context", model.ParameterFieldName).MarshalJSON()
|
||||
params, err := ctxInst.LookupPath(value.FieldPath("context", model.ParameterFieldName)).MarshalJSON()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "{\"parameter1\":\"string\",\"parameter2\":{\"key1\":\"value1\",\"key2\":\"value2\"},\"parameter3\":[\"item1\",\"item2\"]}", string(params))
|
||||
|
||||
artifacts, err := ctxInst.Lookup("context", model.ContextDataArtifacts).MarshalJSON()
|
||||
artifacts, err := ctxInst.LookupPath(value.FieldPath("context", model.ContextDataArtifacts)).MarshalJSON()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "{\"bool\":false,\"string\":\"mytxt\",\"int\":10,\"map\":{\"key\":\"value\"},\"slice\":[\"str1\",\"str2\",\"str3\"]}", string(artifacts))
|
||||
assert.Equal(t, "{\"bool\":false,\"int\":10,\"map\":{\"key\":\"value\"},\"slice\":[\"str1\",\"str2\",\"str3\"],\"string\":\"mytxt\"}", string(artifacts))
|
||||
|
||||
arbitraryData, err := ctxInst.Lookup("context", "arbitraryData").MarshalJSON()
|
||||
arbitraryData, err := ctxInst.LookupPath(value.FieldPath("context", "arbitraryData")).MarshalJSON()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "{\"bool\":false,\"string\":\"mytxt\",\"int\":10,\"map\":{\"key\":\"value\"},\"slice\":[\"str1\",\"str2\",\"str3\"]}", string(arbitraryData))
|
||||
assert.Equal(t, "{\"bool\":false,\"int\":10,\"map\":{\"key\":\"value\"},\"slice\":[\"str1\",\"str2\",\"str3\"],\"string\":\"mytxt\"}", string(arbitraryData))
|
||||
}
|
||||
|
||||
@@ -25,24 +25,21 @@ import (
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/builtin"
|
||||
"github.com/oam-dev/kubevela/pkg/builtin/registry"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
)
|
||||
|
||||
// Process processing the http task
|
||||
func Process(inst *cue.Instance) (*cue.Instance, error) {
|
||||
taskVal := inst.Lookup("processing", "http")
|
||||
func Process(val cue.Value) (cue.Value, error) {
|
||||
taskVal := val.LookupPath(value.FieldPath("processing", "http"))
|
||||
if !taskVal.Exists() {
|
||||
return inst, errors.New("there is no http in processing")
|
||||
return val, errors.New("there is no http in processing")
|
||||
}
|
||||
resp, err := exec(taskVal)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("fail to exec http task, %w", err)
|
||||
return val, fmt.Errorf("fail to exec http task, %w", err)
|
||||
}
|
||||
|
||||
appInst, err := inst.Fill(resp, "processing", "output")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("fail to fill output from http, %w", err)
|
||||
}
|
||||
return appInst, nil
|
||||
return val.FillPath(value.FieldPath("processing", "output"), resp), nil
|
||||
}
|
||||
|
||||
func exec(v cue.Value) (map[string]interface{}, error) {
|
||||
|
||||
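An upstream-only sketch (not part of the commit) of the inst.Fill to Value.FillPath migration used in Process above: FillPath returns a new value and reports failures through Err() instead of a second return value.

package main

import (
	"fmt"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
)

func main() {
	v := cuecontext.New().CompileString(`processing: http: url: "http://127.0.0.1:8090"`)
	// Write a Go value at the given path; check Err() on the result.
	v = v.FillPath(cue.ParsePath("processing.output"), map[string]interface{}{
		"body": "ok",
	})
	if v.Err() != nil {
		panic(v.Err())
	}
	body, _ := v.LookupPath(cue.ParsePath("processing.output.body")).String()
	fmt.Println(body) // ok
}
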
@@ -25,8 +25,9 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
cueJson "cuelang.org/go/pkg/encoding/json"
|
||||
"github.com/bmizerany/assert"
|
||||
)
|
||||
@@ -64,20 +65,16 @@ func TestProcess(t *testing.T) {
|
||||
s := NewMock()
|
||||
defer s.Close()
|
||||
|
||||
r := cue.Runtime{}
|
||||
taskTemplate, err := r.Compile("", TaskTemplate)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
taskTemplate, _ = taskTemplate.Fill(map[string]interface{}{
|
||||
taskTemplate := cuecontext.New().CompileString(TaskTemplate)
|
||||
taskTemplate = taskTemplate.FillPath(value.FieldPath(model.ParameterFieldName), map[string]interface{}{
|
||||
"serviceURL": "http://127.0.0.1:8090/api/v1/token?val=test-token",
|
||||
}, model.ParameterFieldName)
|
||||
})
|
||||
|
||||
inst, err := Process(taskTemplate)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
output := inst.Lookup("output")
|
||||
output := inst.LookupPath(value.FieldPath("output"))
|
||||
data, _ := cueJson.Marshal(output)
|
||||
assert.Equal(t, "{\"data\":\"test-token\"}", data)
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ import (
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/format"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"cuelang.org/go/encoding/gocode/gocodec"
|
||||
@@ -130,6 +131,7 @@ func (def *Definition) SetType(t string) error {
|
||||
}
|
||||
|
||||
// ToCUE converts Definition to CUE value (with predefined Definition's cue format)
|
||||
// nolint:staticcheck
|
||||
func (def *Definition) ToCUE() (*cue.Value, string, error) {
|
||||
annotations := map[string]string{}
|
||||
for key, val := range def.GetAnnotations() {
|
||||
@@ -221,7 +223,7 @@ func (def *Definition) ToCUEString() (string, error) {
|
||||
}
|
||||
|
||||
// FromCUE converts CUE value (predefined Definition's cue format) to Definition
|
||||
// nolint:gocyclo
|
||||
// nolint:gocyclo,staticcheck
|
||||
func (def *Definition) FromCUE(val *cue.Value, templateString string) error {
|
||||
if def.Object == nil {
|
||||
def.Object = map[string]interface{}{}
|
||||
@@ -343,7 +345,7 @@ func (def *Definition) FromYAML(data []byte) error {
|
||||
|
||||
// FromCUEString converts cue string into Definition
|
||||
func (def *Definition) FromCUEString(cueString string, config *rest.Config) error {
|
||||
r := &cue.Runtime{}
|
||||
cuectx := cuecontext.New()
|
||||
f, err := parser.ParseFile("-", cueString, parser.ParseComments)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -393,9 +395,9 @@ func (def *Definition) FromCUEString(cueString string, config *rest.Config) erro
|
||||
return errors.Wrapf(err, "failed to encode template decls to string")
|
||||
}
|
||||
|
||||
inst, err := r.Compile("-", metadataString)
|
||||
if err != nil {
|
||||
return err
|
||||
inst := cuectx.CompileString(metadataString)
|
||||
if inst.Err() != nil {
|
||||
return inst.Err()
|
||||
}
|
||||
templateString, err = formatCUEString(importString + templateString)
|
||||
if err != nil {
|
||||
@@ -410,11 +412,10 @@ func (def *Definition) FromCUEString(cueString string, config *rest.Config) erro
|
||||
if _, err = value.NewValue(templateString+"\n"+velacue.BaseTemplate, pd, ""); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if _, err = r.Compile("-", templateString+"\n"+velacue.BaseTemplate); err != nil {
|
||||
return err
|
||||
} else if val := cuectx.CompileString(templateString + "\n" + velacue.BaseTemplate); val.Err() != nil {
|
||||
return val.Err()
|
||||
}
|
||||
val := inst.Value()
|
||||
return def.FromCUE(&val, templateString)
|
||||
return def.FromCUE(&inst, templateString)
|
||||
}
|
||||
|
||||
// ValidDefinitionTypes return the list of valid definition types
|
||||
|
||||
@@ -84,8 +84,8 @@ func TestDefinitionBasicFunctions(t *testing.T) {
|
||||
if err = def.FromCUEString("template:"+parts[1], nil); err == nil {
|
||||
t.Fatalf("should encounter no metadata found error but not found error")
|
||||
}
|
||||
if err = def.FromCUEString("import \"strconv\"\n"+cueString, nil); err == nil {
|
||||
t.Fatalf("should encounter cue compile error due to useless import but not found error")
|
||||
if err = def.FromCUEString("import \"strconv\"\n"+cueString, nil); err != nil {
|
||||
t.Fatalf("should not encounter cue compile error due to useless import")
|
||||
}
|
||||
if err = def.FromCUEString("abc: {}\n"+cueString, nil); err == nil {
|
||||
t.Fatalf("should encounter duplicated object name error but not found error")
|
||||
|
||||
@@ -101,6 +101,7 @@ func NewStructParameter() StructParameter {
|
||||
}
|
||||
|
||||
// parseParameters will be called recursively to parse parameters
|
||||
// nolint:staticcheck
|
||||
func parseParameters(paraValue cue.Value, paramKey string) error {
|
||||
param := NewStructParameter()
|
||||
param.Name = paramKey
|
||||
|
||||
@@ -65,7 +65,7 @@ import (
|
||||
#ApplyComponentRemaining: #Steps & {
|
||||
// exceptions specify the resources not to apply.
|
||||
exceptions: [...string]
|
||||
_exceptions: {for c in exceptions {"\(c)": true}}
|
||||
exceptions_: {for c in exceptions {"\(c)": true}}
|
||||
component: string
|
||||
|
||||
load: oam.#LoadComponets @step(1)
|
||||
@@ -77,7 +77,7 @@ import (
|
||||
value: rendered.output
|
||||
}
|
||||
for name, c in rendered.outputs {
|
||||
if _exceptions[name] == _|_ {
|
||||
if exceptions_[name] == _|_ {
|
||||
"\(name)": kube.#Apply & {
|
||||
value: c
|
||||
}
|
||||
@@ -89,12 +89,12 @@ import (
|
||||
#ApplyRemaining: #Steps & {
|
||||
// exceptions specify the resources not to apply.
|
||||
exceptions: [...string]
|
||||
_exceptions: {for c in exceptions {"\(c)": true}}
|
||||
exceptions_: {for c in exceptions {"\(c)": true}}
|
||||
|
||||
load: oam.#LoadComponets @step(1)
|
||||
components: #Steps & {
|
||||
for name, c in load.value {
|
||||
if _exceptions[name] == _|_ {
|
||||
if exceptions_[name] == _|_ {
|
||||
"\(name)": oam.#ApplyComponent & {
|
||||
value: c
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/parser"
|
||||
"k8s.io/klog/v2"
|
||||
)
|
||||
|
||||
@@ -88,7 +89,11 @@ func AddImportsFor(inst *build.Instance, tagTempl string) error {
|
||||
PkgName: filepath.Base("vela/custom"),
|
||||
ImportPath: "vela/custom",
|
||||
}
|
||||
if err := p.AddFile("-", tagTempl); err != nil {
|
||||
file, err := parser.ParseFile("-", tagTempl, parser.ParseComments)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.AddSyntax(file); err != nil {
|
||||
return err
|
||||
}
|
||||
inst.Imports = append(inst.Imports, p)
|
||||
@@ -107,7 +112,11 @@ func initBuiltinImports() ([]*build.Instance, error) {
|
||||
PkgName: filepath.Base(path),
|
||||
ImportPath: path,
|
||||
}
|
||||
if err := p.AddFile("-", content); err != nil {
|
||||
file, err := parser.ParseFile("-", content, parser.ParseComments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := p.AddSyntax(file); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
imports = append(imports, p)
|
||||
|
||||
@@ -20,6 +20,8 @@ import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"cuelang.org/go/cue/parser"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"gotest.tools/assert"
|
||||
@@ -28,22 +30,25 @@ import (
|
||||
func TestGetPackages(t *testing.T) {
|
||||
pkgs, err := GetPackages()
|
||||
assert.NilError(t, err)
|
||||
var r cue.Runtime
|
||||
cuectx := cuecontext.New()
|
||||
for path, content := range pkgs {
|
||||
_, err := r.Compile(path, content)
|
||||
file, err := parser.ParseFile(path, content)
|
||||
assert.NilError(t, err)
|
||||
_ = cuectx.BuildFile(file)
|
||||
}
|
||||
|
||||
builder := &build.Instance{}
|
||||
builder.AddFile("-", `
|
||||
file, err := parser.ParseFile("-", `
|
||||
import "vela/custom"
|
||||
out: custom.context`)
|
||||
assert.NilError(t, err)
|
||||
builder := &build.Instance{}
|
||||
err = builder.AddSyntax(file)
|
||||
assert.NilError(t, err)
|
||||
err = AddImportsFor(builder, "context: id: \"xxx\"")
|
||||
assert.NilError(t, err)
|
||||
|
||||
insts := cue.Build([]*build.Instance{builder})
|
||||
assert.Equal(t, len(insts), 1)
|
||||
str, err := insts[0].Lookup("out", "id").String()
|
||||
inst := cuectx.BuildInstance(builder)
|
||||
str, err := inst.LookupPath(cue.ParsePath("out.id")).String()
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, str, "xxx")
|
||||
}
|
||||
|
||||
@@ -85,7 +85,7 @@
|
||||
|
||||
loadPolicies: oam.#LoadPolicies @step(1)
|
||||
policy_: string
|
||||
if inputs.policy == "" {
|
||||
if inputs.policy == "" && loadPolicies.value != _|_ {
|
||||
envBindingPolicies: [ for k, v in loadPolicies.value if v.type == "env-binding" {k}]
|
||||
policy_: envBindingPolicies[0]
|
||||
}
|
||||
|
||||
@@ -32,7 +32,6 @@ import (
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/ast"
|
||||
"cuelang.org/go/cue/build"
|
||||
"cuelang.org/go/cue/format"
|
||||
"cuelang.org/go/encoding/openapi"
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
@@ -71,6 +70,7 @@ import (
|
||||
"github.com/oam-dev/kubevela/apis/types"
|
||||
velacue "github.com/oam-dev/kubevela/pkg/cue"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
"github.com/oam-dev/kubevela/pkg/cue/packages"
|
||||
"github.com/oam-dev/kubevela/pkg/oam"
|
||||
)
|
||||
@@ -234,27 +234,11 @@ func HTTPGetKubernetesObjects(ctx context.Context, url string) ([]*unstructured.
|
||||
|
||||
// GetCUEParameterValue converts definitions to cue format
|
||||
func GetCUEParameterValue(cueStr string, pd *packages.PackageDiscover) (cue.Value, error) {
|
||||
var template *cue.Instance
|
||||
var err error
|
||||
if pd != nil {
|
||||
bi := build.NewContext().NewInstance("", nil)
|
||||
err := bi.AddFile("-", cueStr+velacue.BaseTemplate)
|
||||
if err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
|
||||
template, err = pd.ImportPackagesAndBuildInstance(bi)
|
||||
if err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
} else {
|
||||
r := cue.Runtime{}
|
||||
template, err = r.Compile("", cueStr+velacue.BaseTemplate)
|
||||
if err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
template, err := value.NewValue(cueStr+velacue.BaseTemplate, pd, "")
|
||||
if err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
tempStruct, err := template.Value().Struct()
|
||||
tempStruct, err := template.CueValue().Struct()
|
||||
if err != nil {
|
||||
return cue.Value{}, err
|
||||
}
|
||||
@@ -277,7 +261,13 @@ func GetCUEParameterValue(cueStr string, pd *packages.PackageDiscover) (cue.Valu
|
||||
}
|
||||
|
||||
// GenOpenAPI generates OpenAPI json schema from cue.Instance
|
||||
func GenOpenAPI(inst *cue.Instance) ([]byte, error) {
|
||||
func GenOpenAPI(inst *cue.Instance) (b []byte, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
err = fmt.Errorf("invalid cue definition to generate open api: %v", r)
|
||||
return
|
||||
}
|
||||
}()
|
||||
if inst.Err != nil {
|
||||
return nil, inst.Err
|
||||
}
|
||||
@@ -285,8 +275,8 @@ func GenOpenAPI(inst *cue.Instance) ([]byte, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defaultConfig := &openapi.Config{}
|
||||
b, err := openapi.Gen(paramOnlyIns, defaultConfig)
|
||||
defaultConfig := &openapi.Config{ExpandReferences: true}
|
||||
b, err = openapi.Gen(paramOnlyIns, defaultConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -296,8 +286,9 @@ func GenOpenAPI(inst *cue.Instance) ([]byte, error) {
|
||||
}
|
||||
|
||||
// extractParameterDefinitionNodeFromInstance extracts the `#parameter` ast.Node from root instance, if failed fall back to `parameter` by LookUpDef
|
||||
// nolint:staticcheck
|
||||
func extractParameterDefinitionNodeFromInstance(inst *cue.Instance) ast.Node {
|
||||
opts := []cue.Option{cue.All(), cue.DisallowCycles(true), cue.ResolveReferences(true), cue.Docs(true)}
|
||||
opts := []cue.Option{cue.Docs(true), cue.InlineImports(true)}
|
||||
node := inst.Value().Syntax(opts...)
|
||||
if fileNode, ok := node.(*ast.File); ok {
|
||||
for _, decl := range fileNode.Decls {
|
||||
@@ -313,13 +304,14 @@ func extractParameterDefinitionNodeFromInstance(inst *cue.Instance) ast.Node {
|
||||
}
|
||||
|
||||
// RefineParameterInstance refines cue instance to merely include `parameter` identifier
|
||||
// nolint:staticcheck
|
||||
func RefineParameterInstance(inst *cue.Instance) (*cue.Instance, error) {
|
||||
r := cue.Runtime{}
|
||||
paramVal := inst.LookupDef(model.ParameterFieldName)
|
||||
paramVal := inst.Lookup(model.ParameterFieldName)
|
||||
var paramOnlyStr string
|
||||
switch k := paramVal.IncompleteKind(); k {
|
||||
case cue.StructKind, cue.ListKind:
|
||||
paramSyntax, _ := format.Node(extractParameterDefinitionNodeFromInstance(inst))
|
||||
paramSyntax, _ := format.Node(paramVal.Value().Syntax(cue.Docs(true), cue.ResolveReferences(true)))
|
||||
paramOnlyStr = fmt.Sprintf("#%s: %s\n", model.ParameterFieldName, string(paramSyntax))
|
||||
case cue.IntKind, cue.StringKind, cue.FloatKind, cue.BoolKind:
|
||||
paramOnlyStr = fmt.Sprintf("#%s: %v", model.ParameterFieldName, paramVal)
|
||||
|
||||
@@ -305,6 +305,7 @@ name
|
||||
}
|
||||
}
|
||||
|
||||
// nolint:staticcheck
|
||||
func TestGenOpenAPI(t *testing.T) {
|
||||
type want struct {
|
||||
targetSchemaFile string
|
||||
@@ -445,6 +446,7 @@ variable "mapVar" {
|
||||
assert.True(t, intVarExisted)
|
||||
}
|
||||
|
||||
// nolint:staticcheck
|
||||
func TestRefineParameterInstance(t *testing.T) {
|
||||
// test #parameter exists: mock issues in #1939 & #2062
|
||||
s := `parameter: #parameter
|
||||
|
||||
pkg/utils/common/testdata/workload1.cue
@@ -1,4 +1,4 @@
|
||||
#parameter: {
|
||||
parameter: {
|
||||
// +usage=Which image would you like to use for your service
|
||||
// +short=i
|
||||
image: string
|
||||
@@ -11,4 +11,4 @@
|
||||
|
||||
#routeName: "\(context.appName)-\(context.name)"
|
||||
|
||||
context: {}
|
||||
context: {}
|
||||
|
||||
pkg/utils/common/testdata/workload1.json
@@ -13,6 +13,10 @@
|
||||
"image"
|
||||
],
|
||||
"properties": {
|
||||
"image": {
|
||||
"description": "+usage=Which image would you like to use for your service\n+short=i",
|
||||
"type": "string"
|
||||
},
|
||||
"cmd": {
|
||||
"description": "+usage=Commands to run in the container",
|
||||
"type": "array",
|
||||
@@ -20,10 +24,6 @@
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"image": {
|
||||
"description": "+usage=Which image would you like to use for your service\n+short=i",
|
||||
"type": "string"
|
||||
},
|
||||
"cpu": {
|
||||
"type": "string"
|
||||
}
|
||||
|
||||
@@ -16,7 +16,9 @@ limitations under the License.
|
||||
|
||||
package errors
|
||||
|
||||
import "strings"
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
// LabelConflict defines the conflict label error string
|
||||
@@ -33,3 +35,8 @@ func IsLabelConflict(err error) bool {
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsCuePathNotFound checks if the error is cue path not found error
|
||||
func IsCuePathNotFound(err error) bool {
|
||||
return strings.Contains(err.Error(), "failed to lookup value") && strings.Contains(err.Error(), "not exist")
|
||||
}
|
||||
|
||||
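A standalone sketch that mirrors the IsCuePathNotFound helper added above; the error wording is taken from the lookup errors exercised elsewhere in this diff.

package main

import (
	"errors"
	"fmt"
	"strings"
)

// isCuePathNotFound mirrors pkg/utils/errors.IsCuePathNotFound: it matches the
// "failed to lookup value ... not exist" shape returned by Value.LookupValue.
func isCuePathNotFound(err error) bool {
	return strings.Contains(err.Error(), "failed to lookup value") &&
		strings.Contains(err.Error(), "not exist")
}

func main() {
	err := errors.New("failed to lookup value: var(path=oss) not exist")
	fmt.Println(isCuePathNotFound(err))                     // true
	fmt.Println(isCuePathNotFound(errors.New("other err"))) // false
}
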
@@ -41,6 +41,7 @@ import (
|
||||
"github.com/oam-dev/kubevela/pkg/cue/model/value"
|
||||
"github.com/oam-dev/kubevela/pkg/oam"
|
||||
"github.com/oam-dev/kubevela/pkg/oam/util"
|
||||
verrors "github.com/oam-dev/kubevela/pkg/utils/errors"
|
||||
querytypes "github.com/oam-dev/kubevela/pkg/velaql/providers/query/types"
|
||||
"github.com/oam-dev/kubevela/pkg/workflow/providers"
|
||||
)
|
||||
@@ -252,7 +253,7 @@ var _ = Describe("Test Query Provider", func() {
|
||||
Expect(err).Should(BeNil())
|
||||
err = prd.ListResourcesInApp(nil, newV, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(Equal("var(path=app) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
@@ -392,14 +393,14 @@ var _ = Describe("Test Query Provider", func() {
|
||||
Expect(err).Should(BeNil())
|
||||
err = prd.SearchEvents(nil, v, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(Equal("var(path=value) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
|
||||
optWithoutCluster := `value: {}`
|
||||
v, err = value.NewValue(optWithoutCluster, nil, "")
|
||||
Expect(err).Should(BeNil())
|
||||
err = prd.SearchEvents(nil, v, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(Equal("var(path=cluster) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
|
||||
optWithWrongValue := `value: {}
|
||||
cluster: "test"`
|
||||
@@ -424,20 +425,20 @@ cluster: "test"`
|
||||
Expect(err).Should(Succeed())
|
||||
err = prd.CollectLogsInPod(nil, v, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(ContainSubstring("var(path=cluster) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
|
||||
v, err = value.NewValue(`cluster: "local"`, nil, "")
|
||||
Expect(err).Should(Succeed())
|
||||
err = prd.CollectLogsInPod(nil, v, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(ContainSubstring("var(path=namespace) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
|
||||
v, err = value.NewValue(`cluster: "local"
|
||||
namespace: "default"`, nil, "")
|
||||
Expect(err).Should(Succeed())
|
||||
err = prd.CollectLogsInPod(nil, v, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(ContainSubstring("var(path=pod) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
|
||||
v, err = value.NewValue(`cluster: "local"
|
||||
namespace: "default"
|
||||
@@ -445,7 +446,7 @@ pod: "hello-world"`, nil, "")
|
||||
Expect(err).Should(Succeed())
|
||||
err = prd.CollectLogsInPod(nil, v, nil)
|
||||
Expect(err).ShouldNot(BeNil())
|
||||
Expect(err.Error()).Should(ContainSubstring("var(path=options) not exist"))
|
||||
Expect(verrors.IsCuePathNotFound(err)).Should(BeTrue())
|
||||
|
||||
v, err = value.NewValue(`cluster: "local"
|
||||
namespace: "default"
|
||||
|
||||
@@ -24,7 +24,7 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"cuelang.org/go/cue/cuecontext"
|
||||
"github.com/pkg/errors"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
kerrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
@@ -273,11 +273,7 @@ type ComponentManifest struct {
|
||||
|
||||
// Patch the ComponentManifest with value
|
||||
func (comp *ComponentManifest) Patch(patchValue *value.Value) error {
|
||||
pInst, err := model.NewOther(patchValue.CueValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return comp.Workload.Unify(pInst)
|
||||
return comp.Workload.Unify(patchValue.CueValue())
|
||||
}

type componentMould struct {
@@ -286,11 +282,19 @@ type componentMould struct {
}

func (comp *ComponentManifest) string() (string, error) {
workload, err := comp.Workload.String()
if err != nil {
return "", err
}
cm := componentMould{
StandardWorkload: comp.Workload.String(),
StandardWorkload: workload,
}
for _, aux := range comp.Auxiliaries {
cm.Traits = append(cm.Traits, aux.String())
auxiliary, err := aux.String()
if err != nil {
return "", err
}
cm.Traits = append(cm.Traits, auxiliary)
}
js, err := json.Marshal(cm)
return string(js), err
@@ -302,23 +306,16 @@ func (comp *ComponentManifest) unmarshal(v string) error {
if err := json.Unmarshal([]byte(v), &cm); err != nil {
return err
}
var r cue.Runtime
wlInst, err := r.Compile("workload", cm.StandardWorkload)
if err != nil {
return err
}
wl, err := model.NewBase(wlInst.Value())
wlInst := cuecontext.New().CompileString(cm.StandardWorkload)
wl, err := model.NewBase(wlInst)
if err != nil {
return err
}

comp.Workload = wl
for _, s := range cm.Traits {
auxInst, err := r.Compile("-", s)
if err != nil {
return err
}
aux, err := model.NewOther(auxInst.Value())
auxInst := cuecontext.New().CompileString(s)
aux, err := model.NewOther(auxInst)
if err != nil {
return err
}
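
This hunk shows the core CUE 0.4.3 migration repeated throughout the commit: the deprecated cue.Runtime / Compile pair, which returned a *cue.Instance plus an error, is replaced by a cuecontext whose CompileString returns a cue.Value directly. A minimal before/after sketch, with the old call kept only as a comment:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/cuecontext"
    )

    func main() {
        src := "apiVersion: \"v1\"\nkind: \"Pod\""

        // Pre-0.4 style (deprecated):
        //   var r cue.Runtime
        //   inst, err := r.Compile("workload", src)
        //   val := inst.Value()
        val := cuecontext.New().CompileString(src)
        if err := val.Err(); err != nil { // compile failures travel with the value
            panic(err)
        }
        fmt.Println(val)
    }
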

@@ -44,7 +44,9 @@ func TestComponent(t *testing.T) {
_, ok := components["server"]
r.Equal(ok, true)

r.Equal(cmf.Workload.String(), `apiVersion: "v1"
s, err := cmf.Workload.String()
r.NoError(err)
r.Equal(s, `apiVersion: "v1"
kind: "Pod"
metadata: {
labels: {
@@ -53,32 +55,34 @@ metadata: {
}
spec: {
containers: [{
name: "main"
env: [{
name: "APP"
value: "nginx"
}, ...]
}]
image: "nginx:1.14.2"
imagePullPolicy: "IfNotPresent"
name: "main"
ports: [{
containerPort: 8080
protocol: "TCP"
}, ...]
}, ...]
}]
}]
}
`)
r.Equal(len(cmf.Auxiliaries), 1)
r.Equal(cmf.Auxiliaries[0].String(), `apiVersion: "v1"
s, err = cmf.Auxiliaries[0].String()
r.NoError(err)
r.Equal(s, `apiVersion: "v1"
kind: "Service"
metadata: {
name: "my-service"
}
spec: {
ports: [{
protocol: "TCP"
port: 80
protocol: "TCP"
targetPort: 8080
}, ...]
}]
selector: {
app: "nginx"
}
@@ -96,7 +100,9 @@ env:[{name: "ClusterIP",value: "1.1.1.1"}]}]

cmf, err = wfCtx.GetComponent("server")
r.NoError(err)
r.Equal(cmf.Workload.String(), `apiVersion: "v1"
s, err = cmf.Workload.String()
r.NoError(err)
r.Equal(s, `apiVersion: "v1"
kind: "Pod"
metadata: {
labels: {
@@ -105,7 +111,6 @@ metadata: {
}
spec: {
containers: [{
name: "main"
// +patchKey=name
env: [{
name: "APP"
@@ -116,11 +121,12 @@ spec: {
}, ...]
image: "nginx:1.14.2"
imagePullPolicy: "IfNotPresent"
name: "main"
ports: [{
containerPort: 8080
protocol: "TCP"
}, ...]
}, ...]
}]
}
`)

@@ -201,7 +207,7 @@ result: 101
conflictV, err := value.NewValue(`score: 101`, nil, "")
r.NoError(err)
err = wfCtx.SetVar(conflictV, "football")
r.Equal(err.Error(), "football.result: conflicting values 100 and 101")
r.Equal(err.Error(), "football.score: conflicting values 101 and 100")
}
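
The updated expectation reflects how CUE 0.4.3 words a failed unification of two concrete values: the conflicting field is named and both values are listed, in the new order. A sketch of producing such an error with the public API; the exact message text comes from CUE itself:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/cuecontext"
    )

    func main() {
        ctx := cuecontext.New()
        stored := ctx.CompileString(`score: 100`)
        update := ctx.CompileString(`score: 101`)

        // Two different concrete values for the same field cannot be unified.
        if err := stored.Unify(update).Err(); err != nil {
            fmt.Println(err) // e.g. "score: conflicting values 101 and 100"
        }
    }
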

func TestRefObj(t *testing.T) {

@@ -38,8 +38,10 @@ func Input(ctx wfContext.Context, paramValue *value.Value, step v1beta1.Workflow
if err != nil {
return errors.WithMessagef(err, "get input from [%s]", input.From)
}
if err := paramValue.FillValueByScript(inputValue, input.ParameterKey); err != nil {
return err
if input.ParameterKey != "" {
if err := paramValue.FillValueByScript(inputValue, input.ParameterKey); err != nil {
return err
}
}
}
return nil

@@ -58,11 +58,11 @@ stepID: "success"
},
"no-step-id": {
from: ``,
expectedErr: errors.New("var(path=stepID) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=stepID) not exist"),
},
"no-sender": {
from: `stepID:"no-sender"`,
expectedErr: errors.New("var(path=from) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=from) not exist"),
},
"no-receiver": {
from: `
@@ -75,7 +75,7 @@ port: 465
}
stepID: "no-receiver"
`,
expectedErr: errors.New("var(path=to) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=to) not exist"),
},
"no-content": {
from: `
@@ -89,7 +89,7 @@ port: 465
to: ["user1@gmail.com", "user2@gmail.com"]
stepID: "no-content"
`,
expectedErr: errors.New("var(path=content) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=content) not exist"),
},
"send-fail": {
from: `

@@ -69,11 +69,7 @@ func (h *provider) Apply(ctx wfContext.Context, v *value.Value, act types.Action
return err
}

patcher, err := model.NewOther(pv)
if err != nil {
return err
}
if err := base.Unify(patcher); err != nil {
if err := base.Unify(pv); err != nil {
return err
}
workload, err = base.Unstructured()

@@ -117,13 +117,15 @@ var _ = Describe("Test Workflow Provider Kube", func() {
component, err := ctx.GetComponent("server")
Expect(err).ToNot(HaveOccurred())

s, err := component.Workload.String()
Expect(err).ToNot(HaveOccurred())
v, err := value.NewValue(fmt.Sprintf(`
value:{
%s
metadata: name: "app"
}
cluster: ""
`, component.Workload.String()), nil, "")
`, s), nil, "")
Expect(err).ToNot(HaveOccurred())
err = p.Apply(ctx, v, nil)
Expect(err).ToNot(HaveOccurred())
@@ -136,13 +138,15 @@ cluster: ""
}, workload)
}, time.Second*2, time.Millisecond*300).Should(BeNil())

s, err = component.Workload.String()
Expect(err).ToNot(HaveOccurred())
v, err = value.NewValue(fmt.Sprintf(`
value: {
%s
metadata: name: "app"
}
cluster: ""
`, component.Workload.String()), nil, "")
`, s), nil, "")
Expect(err).ToNot(HaveOccurred())
err = p.Read(ctx, v, nil)
Expect(err).ToNot(HaveOccurred())
@@ -179,10 +183,12 @@ cluster: ""

component, err := ctx.GetComponent("server")
Expect(err).ToNot(HaveOccurred())
s, err := component.Workload.String()
Expect(err).ToNot(HaveOccurred())
v, err := value.NewValue(fmt.Sprintf(`
value: {%s}
cluster: ""
patch: metadata: name: "test-app-1"`, component.Workload.String()), nil, "")
patch: metadata: name: "test-app-1"`, s), nil, "")
Expect(err).ToNot(HaveOccurred())
err = p.Apply(ctx, v, nil)
Expect(err).ToNot(HaveOccurred())

@@ -22,6 +22,7 @@ import (
"strings"
"sync"

"cuelang.org/go/cue/cuecontext"
"github.com/pkg/errors"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"sigs.k8s.io/controller-runtime/pkg/client"
@@ -199,8 +200,15 @@ func (p *provider) LoadComponent(ctx wfContext.Context, v *value.Value, act wfTy
return err
}
vs := string(jt)
if s, err := sets.OpenBaiscLit(vs); err == nil {
vs = s
cuectx := cuecontext.New()
val := cuectx.CompileString(vs)
if s, err := sets.OpenBaiscLit(val); err == nil {
v := cuectx.BuildFile(s)
str, err := sets.ToString(v)
if err != nil {
return err
}
vs = str
}
if err := v.FillRaw(vs, "value", comp.Name); err != nil {
return err
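
LoadComponent now compiles the JSON form of the component with a cuecontext, opens the closed literals through sets.OpenBaiscLit, and serializes the result back to source text before FillRaw. A rough sketch of the compile-and-serialize round trip using only public CUE packages; the sets helpers in the hunk wrap approximately this, so treat it as an approximation rather than the exact implementation:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/cuecontext"
        "cuelang.org/go/cue/format"
    )

    func main() {
        cuectx := cuecontext.New()

        // A component rendered to JSON is valid CUE, so it can be compiled as-is.
        val := cuectx.CompileString(`{"metadata": {"name": "server"}}`)
        if err := val.Err(); err != nil {
            panic(err)
        }

        // Turning the value back into CUE source goes through the AST and the formatter.
        src, err := format.Node(val.Syntax())
        if err != nil {
            panic(err)
        }
        fmt.Println(string(src))
    }
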

@@ -42,7 +42,7 @@ func TestParser(t *testing.T) {
v, err := value.NewValue("", nil, "")
r.NoError(err)
err = p.ApplyComponent(nil, v, act)
r.Equal(err.Error(), "var(path=value) not exist")
r.Equal(err.Error(), "failed to lookup value: var(path=value) not exist")
v.FillObject(map[string]interface{}{}, "value")
err = p.ApplyComponent(nil, v, act)
r.NoError(err)
@@ -53,10 +53,10 @@ func TestParser(t *testing.T) {
r.Equal(outStr, `apiVersion: "v1"
kind: "Pod"
metadata: {
name: "rss-site"
labels: {
app: "web"
}
name: "rss-site"
}
`)

@@ -68,10 +68,10 @@ metadata: {
apiVersion: "v1"
kind: "Service"
metadata: {
name: "service"
labels: {
"trait.oam.dev/resource": "service"
}
name: "service"
}
}
`)

@@ -53,12 +53,12 @@ layout: "Mon, 02 Jan 2006 15:04:05 MST"`,
"test convert date without time layout": {
from: `date: "2021-11-07T01:47:51Z"`,
expected: 0,
expectedErr: errors.New("var(path=layout) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=layout) not exist"),
},
"test convert without date": {
from: ``,
expected: 0,
expectedErr: errors.New("var(path=date) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=date) not exist"),
},
"test convert date with wrong time layout": {
from: `date: "2021-11-07T01:47:51Z"
@@ -119,12 +119,12 @@ layout: "Mon, 02 Jan 2006 15:04:05 MST"
"test convert date without time layout": {
from: `timestamp: 1551452400`,
expected: "",
expectedErr: errors.New("var(path=layout) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=layout) not exist"),
},
"test convert without timestamp": {
from: ``,
expected: "",
expectedErr: errors.New("var(path=timestamp) not exist"),
expectedErr: errors.New("failed to lookup value: var(path=timestamp) not exist"),
},
}

@@ -47,11 +47,7 @@ func (p *provider) PatchK8sObject(ctx wfContext.Context, v *value.Value, act typ
if err != nil {
return err
}
patcher, err := model.NewOther(pv.CueValue())
if err != nil {
return err
}
if err = base.Unify(patcher); err != nil {
if err = base.Unify(pv.CueValue()); err != nil {
return v.FillObject(err, "err")
}

@@ -169,7 +169,7 @@ func TestConvertString(t *testing.T) {
},
"fail": {
from: `bt: 123`,
expectedErr: errors.New("bt: cannot use value 123 (type int) as string|bytes"),
expectedErr: errors.New("bt: cannot use value 123 (type int) as (string|bytes)"),
},
}

@@ -58,13 +58,21 @@ func (h *provider) Load(ctx wfContext.Context, v *value.Value, act types.Action)
}

func fillComponent(v *value.Value, component *wfContext.ComponentManifest, paths ...string) error {
if err := v.FillRaw(component.Workload.String(), append(paths, "workload")...); err != nil {
workload, err := component.Workload.String()
if err != nil {
return err
}
if err := v.FillRaw(workload, append(paths, "workload")...); err != nil {
return err
}
if len(component.Auxiliaries) > 0 {
var auxiliaries []string
for _, aux := range component.Auxiliaries {
auxiliaries = append(auxiliaries, "{"+aux.String()+"}")
auxiliary, err := aux.String()
if err != nil {
return err
}
auxiliaries = append(auxiliaries, "{"+auxiliary+"}")
}
if err := v.FillRaw(fmt.Sprintf("[%s]", strings.Join(auxiliaries, ",")), append(paths, "auxiliaries")...); err != nil {
return err
@@ -138,7 +146,7 @@ func (h *provider) Wait(ctx wfContext.Context, v *value.Value, act types.Action)

cv := v.CueValue()
if cv.Exists() {
ret := cv.Lookup("continue")
ret := cv.LookupPath(value.FieldPath("continue"))
if ret.Exists() {
isContinue, err := ret.Bool()
if err == nil && isContinue {

@@ -85,7 +85,8 @@ component: "server"
assert.NilError(t, err)
component, err := wfCtx.GetComponent("server")
assert.NilError(t, err)
s := component.Workload.String()
s, err := component.Workload.String()
assert.NilError(t, err)
assert.Equal(t, s, `apiVersion: "v1"
kind: "Pod"
metadata: {
@@ -95,7 +96,6 @@ metadata: {
}
spec: {
containers: [{
name: "main"
// +patchKey=name
env: [{
name: "APP"
@@ -106,11 +106,12 @@ spec: {
}, ...]
image: "nginx:1.14.2"
imagePullPolicy: "IfNotPresent"
name: "main"
ports: [{
containerPort: 8080
protocol: "TCP"
}, ...]
}, ...]
}]
}
`)

@@ -320,18 +321,18 @@ metadata:
}
spec: {
containers: [{
name: "main"
env: [{
name: "APP"
value: "nginx"
}, ...]
}]
image: "nginx:1.14.2"
imagePullPolicy: "IfNotPresent"
name: "main"
ports: [{
containerPort: 8080
protocol: "TCP"
}, ...]
}, ...]
}]
}]
}
}
auxiliaries: [{
@@ -342,10 +343,10 @@ auxiliaries: [{
}
spec: {
ports: [{
protocol: "TCP"
port: 80
protocol: "TCP"
targetPort: 8080
}, ...]
}]
selector: {
app: "nginx"
}

@@ -145,6 +145,12 @@ func (t *TaskLoader) makeTaskGenerator(templ string) (wfTypes.TaskGenerator, err
var paramFile string

defer func() {
if r := recover(); r != nil {
exec.err(ctx, false, fmt.Errorf("invalid cue task for evaluation: %v", r), wfTypes.StatusReasonRendering)
stepStatus = exec.status()
operations = exec.operation()
return
}
if taskv == nil {
taskv, err = convertTemplate(ctx, t.pd, strings.Join([]string{templ, paramFile}, "\n"), exec.wfStatus.ID, options.PCtx)
if err != nil {
@@ -290,7 +296,14 @@ func buildValueForStatus(ctx wfContext.Context, step v1beta1.WorkflowStep, pd *p
statusTemplate += fmt.Sprintf("status: %s\n", status)
statusTemplate += contextTempl
statusTemplate += "\n" + inputsTempl
return value.NewValue(template+"\n"+statusTemplate, pd, "")
v, err := value.NewValue(template+"\n"+statusTemplate, pd, "")
if err != nil {
return nil, err
}
if v.Error() != nil {
return nil, v.Error()
}
return v, nil
}

func convertTemplate(ctx wfContext.Context, pd *packages.PackageDiscover, templ, id string, pCtx process.Context) (*value.Value, error) {
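
The extra v.Error() check added to buildValueForStatus exists because CompileString in CUE 0.4.3 has no separate error return: a broken template still yields a value, and the failure only shows up on the value itself. A minimal illustration with the public API; the template string is invented:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/cuecontext"
    )

    func main() {
        // CompileString always returns a cue.Value; the parse failure from the
        // unbalanced brace below has to be read off the value afterwards.
        v := cuecontext.New().CompileString("status: { phase: \"running\"")
        if err := v.Err(); err != nil {
            fmt.Println("template error:", err)
            return
        }
        fmt.Println(v)
    }
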
@@ -464,11 +477,8 @@ func (exec *executor) doSteps(ctx wfContext.Context, v *value.Value) error {
}
return v.StepByFields(func(fieldName string, in *value.Value) (bool, error) {
if in.CueValue().IncompleteKind() == cue.BottomKind {
errInfo, err := sets.ToString(in.CueValue())
if err != nil {
errInfo = "value is _|_"
}
return true, errors.New(errInfo + "(bottom kind)")
// continue if the field is incomplete
return false, nil
}
if retErr := in.CueValue().Err(); retErr != nil {
errInfo, err := sets.ToString(in.CueValue())
@@ -517,7 +527,7 @@ func isStepList(fieldName string) bool {
}

func debugLog(v *value.Value) bool {
debug, _ := v.CueValue().LookupDef("#debug").Bool()
debug, _ := v.CueValue().LookupPath(value.FieldPath("#debug")).Bool()
return debug
}
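
Lookup and LookupDef are deprecated in CUE 0.4.x, which is why the lookups above move to LookupPath with an explicit path. A small sketch with the public API; KubeVela's value.FieldPath helper builds the same kind of path:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue"
        "cuelang.org/go/cue/cuecontext"
    )

    func main() {
        v := cuecontext.New().CompileString("#debug: true\nreplicas: 3")

        // LookupPath replaces both Lookup (regular fields) and LookupDef (definitions).
        debug, _ := v.LookupPath(cue.ParsePath("#debug")).Bool()
        replicas, _ := v.LookupPath(cue.ParsePath("replicas")).Int64()
        fmt.Println(debug, replicas)
    }
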

@@ -109,8 +109,8 @@ myIP: value: "1.1.1.1"
Type: "terminate",
},
{
Name: "rendering",
Type: "renderFailed",
Name: "template",
Type: "templateError",
},
{
Name: "execute",
@@ -141,9 +141,9 @@ myIP: value: "1.1.1.1"
r.Equal(status.Message, "I am terminated")
continue
}
if step.Name == "rendering" {
if step.Name == "template" {
r.Equal(status.Phase, common.WorkflowStepPhaseFailed)
r.Equal(status.Reason, types.StatusReasonRendering)
r.Equal(status.Reason, types.StatusReasonExecute)
continue
}
if step.Name == "execute" {
@@ -197,7 +197,7 @@ close({
Name: "input-err",
Type: "ok",
Properties: &runtime.RawExtension{Raw: []byte(`
{"score": {"y": 101}}
{"score": {"x": 101}}
`)},
Inputs: common.StepInputs{{
From: "score",
@@ -241,10 +241,9 @@ close({
status, operation, err := run.Run(wfCtx, &types.TaskRunOptions{})
switch step.Name {
case "input-err":
r.Equal(operation.Waiting, false)
r.Equal(status.Phase, common.WorkflowStepPhaseFailed)
r.Equal(err.Error(), "do preStartHook: score.x: conflicting values 100 and 101")
case "input":
r.Equal(err.Error(), "do preStartHook: get input from [podIP]: var(path=podIP) not exist")
r.Equal(err.Error(), "do preStartHook: get input from [podIP]: failed to lookup value: var(path=podIP) not exist")
case "output-var-conflict":
r.Contains(status.Message, "conflict")
r.Equal(operation.Waiting, false)
@@ -389,18 +388,18 @@ apply: {

#up: [process,{}]
`,
expected: "ok",
expected: "okokok",
hasErr: true,
},
}

for _, tc := range testCases {
for i, tc := range testCases {
echo = ""
v, err := value.NewValue(tc.base, nil, "", value.TagFieldOrder)
r.NoError(err)
err = exec.doSteps(wfCtx, v)
r.Equal(err != nil, tc.hasErr)
r.Equal(echo, tc.expected)
r.Equal(echo, tc.expected, i)
}

}
@@ -646,6 +645,14 @@ func TestValidateIfValue(t *testing.T) {
},
expected: true,
},
{
name: "error if",
step: v1beta1.WorkflowStep{
If: `test == true`,
},
expectedErr: "invalid if value",
expected: false,
},
}

for _, tc := range testCases {
@@ -714,7 +721,7 @@ name: context.name
return fmt.Sprintf(templ, "wait"), nil
case "terminate":
return fmt.Sprintf(templ, "terminate"), nil
case "renderFailed":
case "templateError":
return `
output: xx
`, nil

@@ -94,7 +94,6 @@ func TestDebugApplicationWithWorkflow(t *testing.T) {
},
},
step: "test-wf1",
focus: "test",
expectedErr: "failed to parse debug configmap",
},
"success": {

@@ -114,6 +114,7 @@ func getPrompt(cmd *cobra.Command, reader *bufio.Reader, description string, pro
}
}

// nolint:staticcheck
func buildTemplateFromYAML(templateYAML string, def *pkgdef.Definition) error {
templateYAMLBytes, err := utils.ReadRemoteOrLocalPath(templateYAML, false)
if err != nil {

@@ -163,7 +163,7 @@ func NewInstallCommand(c common.Args, order string, ioStreams util.IOStreams) *c
}
// Step4: apply new CRDs
if err := upgradeCRDs(cmd.Context(), kubeClient, chart); err != nil {
return errors.New(fmt.Sprintf("upgrade CRD failure %s", err.Error()))
return fmt.Errorf("upgrade CRD failure %w", err)
}
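
Wrapping with %w instead of flattening the error into a string keeps the cause inspectable with errors.Is and errors.As. A small, self-contained illustration; the wrapped error here is invented:

    package main

    import (
        "errors"
        "fmt"
    )

    var errCRD = errors.New("no matches for kind \"CustomResourceDefinition\"")

    func upgrade() error {
        // %w keeps the original error in the chain, unlike building a flat string.
        return fmt.Errorf("upgrade CRD failure %w", errCRD)
    }

    func main() {
        err := upgrade()
        fmt.Println(err)                    // upgrade CRD failure no matches for ...
        fmt.Println(errors.Is(err, errCRD)) // true: callers can still match the cause
    }
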
// Step5: Install or upgrade helm release
release, err := installArgs.helmHelper.UpgradeChart(chart, kubeVelaReleaseName, installArgs.Namespace, values,

@@ -56,10 +56,6 @@ var _ = Describe("DefinitionFiles", func() {
Description: "description not defined",
Category: types.CUECategory,
Parameters: []types.Parameter{
{
Type: cue.ListKind,
Name: "env",
},
{
Name: "image",
Type: cue.StringKind,
@@ -75,6 +71,10 @@ var _ = Describe("DefinitionFiles", func() {
Default: int64(8080),
Usage: "Which port do you want customer traffic sent to",
},
{
Type: cue.ListKind,
Name: "env",
},
},
CrdInfo: &types.CRDInfo{
APIVersion: "apps/v1",
@@ -89,22 +89,25 @@ var _ = Describe("DefinitionFiles", func() {
Type: types.TypeComponentDefinition,
Description: "description not defined",
Category: types.CUECategory,
Parameters: []types.Parameter{{
Name: "env", Type: cue.ListKind,
}, {
Name: "image",
Type: cue.StringKind,
Default: "",
Short: "i",
Required: true,
Usage: "Which image would you like to use for your service",
}, {
Name: "port",
Type: cue.IntKind,
Short: "p",
Default: int64(6379),
Usage: "Which port do you want customer traffic sent to",
}},
Parameters: []types.Parameter{
{
Name: "image",
Type: cue.StringKind,
Default: "",
Short: "i",
Required: true,
Usage: "Which image would you like to use for your service",
}, {
Name: "port",
Type: cue.IntKind,
Short: "p",
Default: int64(6379),
Usage: "Which port do you want customer traffic sent to",
},
{
Name: "env", Type: cue.ListKind,
},
},
CrdName: "deployments.apps",
CrdInfo: &types.CRDInfo{
APIVersion: "apps/v1",

@@ -122,6 +122,7 @@ func (ref *ParseReference) formatTableString(s string) string {
}

// prepareConsoleParameter prepares the table content for each property
// nolint:staticcheck
func (ref *ParseReference) prepareConsoleParameter(tableName string, parameterList []ReferenceParameter, category types.CapabilityCategory) ConsoleReference {
table := tablewriter.NewWriter(os.Stdout)
table.SetColWidth(100)
@@ -152,7 +153,7 @@ func (ref *ParseReference) prepareConsoleParameter(tableName string, parameterLi

// parseParameters parses every parameter
// TODO(wonderflow): refactor the code to reduce the complexity
//nolint:gocyclo
// nolint:staticcheck,gocyclo
func (ref *ParseReference) parseParameters(capName string, paraValue cue.Value, paramKey string, depth int, containSuffix bool) (string, []ConsoleReference, error) {
var doc string
var console []ConsoleReference
@@ -206,9 +207,17 @@ func (ref *ParseReference) parseParameters(capName string, paraValue cue.Value,
case cue.StructKind:
if subField, err := val.Struct(); err == nil && subField.Len() == 0 { // err cannot be not nil,so ignore it
if mapValue, ok := val.Elem(); ok {
source, converted := mapValue.Source().(*ast.Ident)
if converted && len(source.Name) != 0 {
param.PrintableType = fmt.Sprintf("map[string]:%s", source.Name)
var ident *ast.Ident
if source, ok := mapValue.Source().(*ast.Ident); ok {
ident = source
}
if source, ok := mapValue.Source().(*ast.Field); ok {
if v, ok := source.Value.(*ast.Ident); ok {
ident = v
}
}
if ident != nil && len(ident.Name) != 0 {
param.PrintableType = fmt.Sprintf("map[string]:%s", ident.Name)
} else {
param.PrintableType = fmt.Sprintf("map[string]:%s", mapValue.IncompleteKind().String())
}
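
Under CUE 0.4.3 the Source() node behind such a map element can apparently be either an *ast.Ident or an *ast.Field, hence the defensive handling above. A stripped-down sketch of that type switch; the helper name is invented:

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/ast"
    )

    // printableName extracts an identifier name from whichever AST node the
    // source happens to be, mirroring the defensive handling above.
    func printableName(n ast.Node) string {
        switch src := n.(type) {
        case *ast.Ident:
            return src.Name
        case *ast.Field:
            if id, ok := src.Value.(*ast.Ident); ok {
                return id.Name
            }
        }
        return ""
    }

    func main() {
        fmt.Println(printableName(ast.NewIdent("#KeySecret")))
    }
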

@@ -23,7 +23,6 @@ import (
"os"
"path/filepath"
"reflect"
"strings"
"testing"

"github.com/crossplane/crossplane-runtime/pkg/test"
@@ -592,5 +591,5 @@ parameter: {
err = w.Close()
assert.NoError(t, err)
out, _ := ioutil.ReadAll(r)
assert.True(t, strings.Contains(string(out), "map[string]:#KeySecret"))
assert.Contains(t, string(out), "map[string]:#KeySecret")
}