容器云提交spark job任务
容器云提交Kind为Job类型的spark任务，首先需要申请具有Job任务提交权限的rbac，然后编写对应的yaml文件，通过spark内置的spark-submit命令，提交用户程序(jar包)到集群执行。
1、创建任务job提交权限rbac
…容器云提交spark job任务
容器云提交Kind为Job类型的spark任务，首先需要申请具有Job任务提交权限的rbac，然后编写对应的yaml文件，通过spark内置的spark-submit命令，提交用户程序(jar包)到集群执行。
1、创建任务job提交权限rbac
创建rbac账户并分配资源权限。Pod服务账户的创建可参考kubernetes api资源查询命令（kubectl api-resources）。
# Quoted 'EOF' delimiter: the manifest is written to disk verbatim, with no
# shell expansion inside the heredoc. (Original text had lost the << operator.)
cat > ecc-recommend-rbac.yaml << 'EOF'
---
apiVersion: v1
kind: Namespace
metadata:
  name: item-dev-recommend
  labels:
    name: item-dev-recommend
---
# ServiceAccount spark-cdp, scoped to the namespace created above.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: spark-cdp
  namespace: item-dev-recommend
---
# Role granting the resource permissions the Spark job needs.
# NOTE: "*" must be quoted — an unquoted * is a YAML alias indicator.
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: spark-cdp
  namespace: item-dev-recommend
rules:
  # "" denotes the core API group.
  - apiGroups: [""]
    resources: ["pods"]
    verbs: ["*"]
  - apiGroups: [""]
    resources: ["configmaps"]
    verbs: ["*"]
  - apiGroups: [""]
    resources: ["services", "secrets"]
    verbs: ["create", "get", "delete"]
  - apiGroups: ["extensions"]
    resources: ["ingresses"]
    verbs: ["create", "get", "delete"]
  - apiGroups: [""]
    resources: ["nodes"]
    verbs: ["get"]
  - apiGroups: [""]
    resources: ["resourcequotas"]
    verbs: ["get", "list", "watch"]
  - apiGroups: [""]
    resources: ["events"]
    verbs: ["create", "update", "patch"]
  - apiGroups: ["apiextensions.k8s.io"]
    resources: ["customresourcedefinitions"]
    verbs: ["create", "get", "update", "delete"]
  - apiGroups: ["admissionregistration.k8s.io"]
    resources:
      - mutatingwebhookconfigurations
      - validatingwebhookconfigurations
    verbs: ["create", "get", "update", "delete"]
  - apiGroups: ["sparkoperator.k8s.io"]
    resources:
      - sparkapplications
      - scheduledsparkapplications
      - sparkapplications/status
      - scheduledsparkapplications/status
    verbs: ["*"]
  - apiGroups: ["scheduling.volcano.sh"]
    resources:
      - podgroups
      - queues
      - queues/status
    verbs: ["get", "list", "watch", "create", "delete", "update"]
  - apiGroups: ["batch"]
    resources: ["cronjobs", "jobs"]
    verbs: ["*"]
---
# Bind the Role to the spark-cdp ServiceAccount.
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: spark-cdp
  namespace: item-dev-recommend
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: spark-cdp
subjects:
  - kind: ServiceAccount
    name: spark-cdp
EOF
2、spark pv,pvc
构建pv：挂载NFS，定义pv的访问模式(accessModes)和存储容量(capacity)。
# Quoted 'EOF': write the manifest verbatim, no shell expansion.
cat > ecc-recommend-pv.yaml << 'EOF'
apiVersion: v1
kind: PersistentVolume
# NOTE(review): PersistentVolume is cluster-scoped; the namespace field here
# is ignored by the API server (kept from the original for fidelity).
metadata:
  name: dev-cdp-pv01
  namespace: item-dev-recommend
spec:
  capacity:
    storage: 10Gi
  # Access modes: ReadWriteOnce, ReadOnlyMany, ReadWriteMany.
  accessModes:
    - ReadWriteOnce
  nfs:
    path: /data/nfs
    server: 192.168.0.135
EOF
构建pvc
# Quoted 'EOF': write the manifest verbatim, no shell expansion.
cat > ecc-recommend-pvc.yaml << 'EOF'
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: dev-cdp-pvc01
  namespace: item-dev-recommend
spec:
  # Must match the access mode of the PV it binds to (dev-cdp-pv01).
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 10Gi
EOF
3、spark-submit任务提交
将java/scala程序包开发完成后，通过spark-submit命令提交jar包到集群执行。
# Quoted 'EOF' is required here: the manifest contains $(KUBERNETES_SERVICE_HOST)
# style references that the Kubernetes container runtime expands — an unquoted
# heredoc would let the local shell run them as command substitutions instead.
cat > ecc-recommend-sparksubmit.yaml << 'EOF'
---
apiVersion: batch/v1
kind: Job
metadata:
  name: item-recommend-job
  namespace: item-dev-recommend
  labels:
    k8s-app: item-recommend-job
spec:
  template:
    metadata:
      labels:
        k8s-app: item-recommend-job
    spec:
      containers:
        - name: item-recommend-job
          image: acpimagehub.ecc.cn/spark:3.11
          imagePullPolicy: IfNotPresent
          # spark-submit in cluster mode against the in-cluster API server.
          # The stripped "=" separators of every --conf key=value pair have
          # been restored.
          args:
            - /opt/spark/bin/spark-submit
            - --class
            - com.www.ecc.com.recommend.ItemRecommender
            - --master
            - k8s://https://$(KUBERNETES_SERVICE_HOST):$(KUBERNETES_SERVICE_PORT)
            - --name
            - item-recommend-job
            - --jars
            - /opt/spark/jars/spark-cassandra-connector_2.11-2.3.4.jar
            - --conf
            - spark.kubernetes.authenticate.caCertFile=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt
            - --conf
            - spark.kubernetes.authenticate.oauthTokenFile=/var/run/secrets/kubernetes.io/serviceaccount/token
            - --conf
            - spark.kubernetes.driver.limit.cores=3
            - --conf
            - spark.kubernetes.executor.limit.cores=8
            - --conf
            - spark.kubernetes.driver.limit.memory=5g
            - --conf
            - spark.kubernetes.executor.limit.memory=32g
            - --conf
            - spark.executor.instances=8
            # Fixed property name (original had "spark.sql.crossJoin.enable").
            - --conf
            - spark.sql.crossJoin.enabled=true
            - --conf
            - spark.executor.cores=6
            - --conf
            - spark.executor.memory=32g
            - --conf
            - spark.driver.cores=3
            # Fixed typo (original had "spark.dirver.memory", which Spark
            # would silently ignore, leaving the driver at its default heap).
            - --conf
            - spark.driver.memory=5g
            - --conf
            - spark.sql.autoBroadcastJoinThreshold=-1
            - --conf
            - spark.kubernetes.namespace=item-dev-recommend
            - --conf
            - spark.driver.port=45970
            - --conf
            - spark.blockManager.port=45980
            - --conf
            - spark.kubernetes.container.image=acpimagehub.ecc.cn/spark:3.11
            - --conf
            - spark.executor.extraJavaOptions=-Duser.timezone=GMT+08:00
            - --conf
            - spark.driver.extraJavaOptions=-Duser.timezone=GMT+08:00
            - --conf
            - spark.default.parallelism=500
            - /odsdata/item-recommender-1.0.0-SNAPSHOT.jar
          # env values must be strings in Kubernetes — bare numbers are
          # rejected by the API server, hence the quotes.
          env:
            - name: SPARK_SHUFFLE_PARTITIONS
              value: "100"
            # NOTE(review): "CASSANDR_HOST" looks like a typo for
            # CASSANDRA_HOST — confirm against the application code before
            # renaming; kept as-is to match what the jar reads.
            - name: CASSANDR_HOST
              value: "192.168.0.1,192.168.0.2,192.168.0.3"
            - name: CASSANDRA_PORT
              value: "9042"
            - name: AUTH_USERNAME
              value: user
            - name: AUTH_PASSWORD
              value: "123456"
          ports:
            - containerPort: 9000
              name: 9000tcp2
              protocol: TCP
          # NOTE(review): 2Gi pod memory limit is far below the 5g driver /
          # 32g executor Spark settings above — confirm intended sizing.
          resources:
            limits:
              cpu: 3
              memory: 2Gi
            requests:
              cpu: 3
              memory: 2Gi
          volumeMounts:
            - mountPath: /odsdata
              name: item-spark-pvc
      volumes:
        - name: item-spark-pvc
          persistentVolumeClaim:
            claimName: dev-cdp-pvc01
      dnsPolicy: ClusterFirst
      restartPolicy: Never
      hostname: item-recommend-job
      securityContext: {}
      serviceAccountName: spark-cdp
---
# NodePort Service exposing the Spark UI and driver/blockManager ports.
apiVersion: v1
kind: Service
metadata:
  name: item-recommend-job
  namespace: item-dev-recommend
spec:
  type: NodePort
  ports:
    # spark UI
    - name: sparkjob-tcp4040
      port: 4040
      protocol: TCP
      targetPort: 4040
    # spark driver port
    - name: sparkjob-tcp-45970
      port: 45970
      protocol: TCP
      targetPort: 45970
    # spark executor port
    - name: sparkjob-tcp-48080
      port: 48080
      protocol: TCP
      targetPort: 48080
    # spark block manager port
    - name: sparkjob-tcp-45980
      port: 45980
      protocol: TCP
      targetPort: 45980
  selector:
    k8s-app: item-recommend-job
EOF
4、打包插件小记
<!-- Maven build section: skip unit tests, compile Java + Scala sources,
     and package a shaded (fat) jar for spark-submit.
     Reconstructed — the original text had all angle brackets stripped. -->
<build>
  <resources>
    <resource>
      <directory>src/main/resources</directory>
      <includes>
        <include>*.properties</include>
      </includes>
      <filtering>false</filtering>
    </resource>
  </resources>
  <plugins>
    <!-- Skip tests during packaging. -->
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-surefire-plugin</artifactId>
      <configuration>
        <skipTests>true</skipTests>
      </configuration>
    </plugin>
    <!-- Java compilation. -->
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-compiler-plugin</artifactId>
      <version>3.6.1</version>
      <configuration>
        <source>${java.version}</source>
        <target>${java.version}</target>
        <encoding>${project.build.sourceEncoding}</encoding>
      </configuration>
      <executions>
        <execution>
          <phase>compile</phase>
          <goals>
            <goal>compile</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
    <!-- Scala compilation, run before Java so mixed sources resolve. -->
    <plugin>
      <groupId>net.alchim31.maven</groupId>
      <artifactId>scala-maven-plugin</artifactId>
      <version>3.2.2</version>
      <executions>
        <execution>
          <id>scala-compile-first</id>
          <phase>process-resources</phase>
          <goals>
            <goal>add-source</goal>
            <goal>compile</goal>
            <goal>testCompile</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
    <!-- Build the shaded jar submitted to the cluster. -->
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-shade-plugin</artifactId>
      <version>3.2.1</version>
      <executions>
        <execution>
          <phase>package</phase>
          <goals>
            <goal>shade</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>