#
# Sample scrapyd-k8s configuration for a Kubernetes cluster
#
[scrapyd]
bind_address = 0.0.0.0
http_port = 6800
# Optional basic HTTP authentication for all endpoints.
#username = sampleusername
#password = changeme123
# For Kubernetes, use the remote container repository and the Kubernetes launcher.
repository = scrapyd_k8s.repository.Remote
launcher = scrapyd_k8s.launcher.K8s
# Namespace to work in (it needs to exist).
# Check RBAC if you run scrapyd-k8s in a different namespace than the spiders.
namespace = default
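# As an illustration (not part of this config), a non-default namespace could
# be created beforehand with kubectl and then set here instead:
#   kubectl create namespace scrapyd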
# Optional image pull secret, in case your spider images are in a private registry.
#pull_secret = ghcr-registry
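# As a sketch, such a pull secret for ghcr.io could be created with kubectl,
# assuming a GitHub username and a personal access token with read:packages:
#   kubectl create secret docker-registry ghcr-registry \
#     --docker-server=ghcr.io \
#     --docker-username=<github-username> \
#     --docker-password=<personal-access-token>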
# For each project, define a project section.
# This contains a repository that points to the remote container repository.
# An optional env_secret is the name of a secret with additional environment
# variables to run the spiders with; similarly env_config for a configmap.
[project.example]
env_secret = example-env-secret
env_config = example-env-configmap
repository = ghcr.io/q-m/scrapyd-k8s-spider-example
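# As a sketch, the secret and configmap referenced above could be created with
# kubectl; the environment variable names here are hypothetical:
#   kubectl create secret generic example-env-secret --from-literal=API_KEY=changeme
#   kubectl create configmap example-env-configmap --from-literal=LOG_LEVEL=INFO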
#
# Resources
#
# Resource requests and limits are completely optional, so you can omit all of
# these sections if you like, though in production it is recommended to set
# them. Resources can be set at the global, project and spider level.
# Each more specific level overrides the previous one, so you can set limited
# default resources and give a specific project or spider more headroom;
# a worked example follows the spider-level section below.
# Default resource requests and limits for all projects; can be overridden per project.
[default.resources]
requests_cpu = 100m
requests_memory = 0.2G
limits_cpu = 0.8
limits_memory = 0.5G
# Resource requests and limits for the example project; can be overridden per spider.
[project.example.resources]
requests_cpu = 90m
requests_memory = 0.15G
limits_cpu = 0.6
limits_memory = 0.3G
# Resource requests and limits for a specific spider in the example project.
[project.example.quotes.resources]
requests_cpu = 80m
requests_memory = 0.12G
limits_cpu = 0.5
limits_memory = 0.2G
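# Worked example of the precedence above: a job for the quotes spider in the
# example project gets limits_memory = 0.2G (spider level); other spiders in
# the example project get 0.3G (project level); spiders in other projects get
# 0.5G (the default).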
#[joblogs]
# Choose storage provider
#storage_provider = s3
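# Name of the bucket (container) to store job logs in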
#container_name = scrapyd-k8s-example-bucket
# Number of log lines to check when deciding whether a log is unique, at least 2
#num_lines_to_check = 2
#[joblogs.storage.s3]
# Set your S3 access key as an environment variable or below
#key = ${S3_KEY}
# Set your S3 secret key as an environment variable or below
#secret = ${S3_SECRET}
#region = eu-north-1
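# As a sketch, the ${S3_KEY} and ${S3_SECRET} placeholders above assume the
# credentials are exported in the environment where scrapyd-k8s runs, e.g.
# (hypothetical values):
#   export S3_KEY=AKIAIOSFODNN7EXAMPLE
#   export S3_SECRET=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY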