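# systemd unit for the agricultural stock platform's Spark data processor
# scheduler. Assuming the file is installed as spark-scheduler.service (the
# name is not fixed by the project), a typical install is:
#   cp spark-scheduler.service /etc/systemd/system/
#   systemctl daemon-reload
#   systemctl enable --now spark-scheduler.service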
[Unit]
Description=Agricultural Stock Spark Data Processor Scheduler
Documentation=https://github.com/your-repo/agricultural-stock-platform
After=network.target mysql.service
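# After= only orders this unit behind the network and MySQL; it does not start
# mysql.service by itself. If the scheduler cannot run without the database,
# an additional Wants=mysql.service (unit name assumed) would pull it in.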
[Service]
Type=simple
User=spark
Group=spark
WorkingDirectory=/opt/agricultural-stock-platform/spark-processor
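# The -XX:+PrintGCDetails/-XX:+PrintGCTimeStamps switches are Java 8-era GC
# logging flags (Java 9+ moved to -Xlog:gc*), which matches the Java 8
# JAVA_HOME set below. The logback, config and jar paths are relative, so they
# resolve against WorkingDirectory.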
ExecStart=/usr/bin/java -Xmx4g -Xms2g -XX:+UseG1GC \
    -XX:+PrintGCDetails -XX:+PrintGCTimeStamps \
    -Dlogback.configurationFile=src/main/resources/logback.xml \
    -Dconfig.file=src/main/resources/application-prod.conf \
    -jar target/spark-data-processor-1.0.0.jar scheduler
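# Stopping sends SIGTERM to the JVM so Spark's shutdown hooks can run; after
# an abnormal exit the scheduler is restarted with a 30 second delay.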
ExecStop=/bin/kill -TERM $MAINPID
Restart=on-failure
RestartSec=30
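# stdout/stderr go to the journal tagged "spark-scheduler"; read them with
# journalctl -t spark-scheduler.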
StandardOutput=journal
StandardError=journal
SyslogIdentifier=spark-scheduler

# Environment variables
Environment=JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
Environment=SPARK_HOME=/opt/spark
Environment=PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
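# The limits below are raised because Spark keeps many shuffle, log and socket
# file descriptors open at once (the usual default ceiling is 1024).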
# Resource limits
LimitNOFILE=65536
LimitNPROC=4096

# Security settings
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=strict
ProtectHome=true
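# ProtectSystem=strict leaves the rest of the filesystem read-only for this
# service, so every directory the processor writes to must be whitelisted here.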
ReadWritePaths=/opt/agricultural-stock-platform/spark-processor/logs
ReadWritePaths=/tmp/spark-warehouse

[Install]
WantedBy=multi-user.target