Use Shared Services for Crawling, Redis, Profile Browsers (#1088)
* refactor to use shared, role-based services across pods (see the naming sketch below):
  - 'crawler' service for all crawler screencasting, scales 0 .. N with crawl-<ID>-N.crawler
  - 'redis' service for all redis access, redis-<ID>-0.redis
  - 'browser' service for all browser access (profile browsers), browser-<ID>-0.browser
  - don't create a new service per crawl/profile at all
  - enable 'publishNotReadyAddresses' for potentially faster resolving, especially for redis
  - remove 'service' as a resource type managed by the operator, since services are no longer created dynamically
  - remove frontend var CRAWLER_SVC_SUFFIX; the suffix is always '.crawler' to match the crawler service name
parent e7f2d93f80
commit 989ed2a8da
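As an illustration of the naming scheme above, here is a minimal sketch (not part of the commit) of how pod addresses come together under the shared headless services; the NAMESPACE value and helper names are hypothetical, and in the chart the namespace actually comes from .Values.crawler_namespace:

    # Sketch only: a pod behind a headless Kubernetes service resolves as
    # <pod-name>.<service-name>.<namespace>.svc.cluster.local
    NAMESPACE = "crawlers"  # hypothetical; the chart uses .Values.crawler_namespace

    def redis_url(crawl_id: str) -> str:
        # shared 'redis' service; the redis pod for a crawl is redis-<ID>-0
        return f"redis://redis-{crawl_id}-0.redis.{NAMESPACE}.svc.cluster.local/0"

    def crawler_screencast_host(crawl_id: str, index: int) -> str:
        # shared 'crawler' service; crawler pods scale 0..N (screencast served on port 9037),
        # per the commit message resolving as crawl-<ID>-N.crawler
        return f"crawl-{crawl_id}-{index}.crawler.{NAMESPACE}.svc.cluster.local"

    def browser_api_host(browser_id: str) -> str:
        # shared 'browser' service; one pod per profile browser (API on port 9223)
        return f"browser-{browser_id}-0.browser.{NAMESPACE}.svc.cluster.local"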
@@ -58,9 +58,8 @@ class K8sAPI:
     def get_redis_url(self, crawl_id):
         """get redis url for crawl id"""
-        redis_id = f"redis-{crawl_id}"
         redis_url = (
-            f"redis://{redis_id}-0.{redis_id}.{self.namespace}.svc.cluster.local/0"
+            f"redis://redis-{crawl_id}-0.redis.{self.namespace}.svc.cluster.local/0"
         )
         return redis_url

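For example, a crawl with a hypothetical id of abc123 now connects to redis://redis-abc123-0.redis.<namespace>.svc.cluster.local/0, reaching the redis pod through the shared 'redis' headless service instead of a dedicated per-crawl redis-abc123 service.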
@@ -262,7 +262,7 @@ class BtrixOperator(K8sAPI):

         has_redis_children = redis_sts in data.children[STS]
         if has_redis_children:
-            children[2]["spec"]["volumeClaimTemplates"] = data.children[STS][redis_sts][
+            children[1]["spec"]["volumeClaimTemplates"] = data.children[STS][redis_sts][
                 "spec"
             ]["volumeClaimTemplates"]

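The index presumably shifts from children[2] to children[1] because the operator no longer emits a per-crawl Service as a child resource, so the redis StatefulSet moves up one position in the children list.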
@@ -315,12 +315,11 @@ class ProfileOps:

    async def _send_browser_req(self, browserid, path, method="GET", json=None):
        """make request to browser api to get state"""
-        browser_host = f"browser-{browserid}-0.browser-{browserid}"
        try:
            async with aiohttp.ClientSession() as session:
                async with session.request(
                    method,
-                    f"http://{browser_host}{self.browser_fqdn_suffix}:9223{path}",
+                    f"http://browser-{browserid}-0.browser{self.browser_fqdn_suffix}:9223{path}",
                    json=json,
                ) as resp:
                    json = await resp.json()

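The browser API host is now browser-<ID>-0 under the shared 'browser' service (plus the configured browser_fqdn_suffix), replacing the per-browser browser-<ID> service name in the hostname.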
@@ -17,7 +17,7 @@ spec:
       crawl: {{ id }}
       role: crawler

-  serviceName: crawl-{{ id }}
+  serviceName: crawler
   replicas: {{ scale }}
   podManagementPolicy: OrderedReady

@@ -165,23 +165,3 @@ spec:
             periodSeconds: 120
             failureThreshold: 3
           {% endif %}
-
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: crawl-{{ id }}
-  labels:
-    crawl: {{ id }}
-    role: crawler
-
-spec:
-  clusterIP: None
-  selector:
-    crawl: {{ id }}
-    role: crawler
-
-  ports:
-    - protocol: TCP
-      port: 9037
-      name: screencast

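With the per-crawl Service block dropped, crawler pods are reached for screencasting (port 9037) through the shared 'crawler' headless service added in chart/templates/service.yaml below, e.g. crawl-{{ id }}-0.crawler.<namespace>.svc.cluster.local.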
@@ -14,7 +14,7 @@ spec:
       browser: {{ id }}
       role: browser

-  serviceName: browser-{{ id }}
+  serviceName: browser
   replicas: 1

   template:

@@ -82,27 +82,3 @@ spec:
           - name: CHROME_FLAGS
             value: "--proxy-server=socks5://{{ crawler_socks_proxy_host }}:{{ crawler_socks_proxy_port | default('9050') }}"
           {% endif %}
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: browser-{{ id }}
-  labels:
-    browser: {{ id }}
-    role: browser
-
-spec:
-  clusterIP: None
-  selector:
-    browser: {{ id }}
-    role: browser
-
-  ports:
-    - protocol: TCP
-      port: 9223
-      name: browser-api
-
-    - protocol: TCP
-      port: 9222
-      name: browser-ws
-

@@ -17,7 +17,7 @@ spec:
       crawl: {{ id }}
       role: redis

-  serviceName: redis-{{ id }}
+  serviceName: redis
   replicas: {{ redis_scale }}
   podManagementPolicy: Parallel

@@ -115,23 +115,3 @@ spec:
             command:
               - redis-cli
               - ping
-
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: redis-{{ id }}
-  labels:
-    crawl: {{ id }}
-    role: redis
-
-spec:
-  clusterIP: None
-  selector:
-    crawl: {{ id }}
-    role: redis
-
-  ports:
-    - protocol: TCP
-      port: 6379
-      name: redis

@@ -41,9 +41,6 @@ spec:
             - name: CRAWLER_FQDN_SUFFIX
               value: ".{{ .Values.crawler_namespace }}.svc.cluster.local"

-            - name: CRAWLER_SVC_SUFFIX
-              value: ".crawl-$crawl"
-
             - name: NGINX_ENTRYPOINT_WORKER_PROCESSES_AUTOTUNE
               value: "1"

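This variable was consumed by the frontend nginx config further down, where the '.crawler' suffix is now hardcoded, so it no longer needs to be injected into the frontend deployment.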
@@ -15,11 +15,6 @@ spec:
       updateStrategy:
         method: InPlace

-    - apiVersion: v1
-      resource: services
-      updateStrategy:
-        method: OnDelete
-
   hooks:
     sync:
       webhook:

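With the three role services now defined statically in the chart (see chart/templates/service.yaml below), the operator no longer needs to manage Service objects as child resources, so the services entry is removed from its child-resource list.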
chart/templates/service.yaml (new file, 57 lines)
@@ -0,0 +1,57 @@
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: crawler
+  namespace: {{ .Values.crawler_namespace }}
+
+spec:
+  clusterIP: None
+  publishNotReadyAddresses: true
+  selector:
+    role: crawler
+
+  ports:
+    - protocol: TCP
+      port: 9037
+      name: screencast
+
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: redis
+  namespace: {{ .Values.crawler_namespace }}
+
+spec:
+  clusterIP: None
+  publishNotReadyAddresses: true
+  selector:
+    role: redis
+
+  ports:
+    - protocol: TCP
+      port: 6379
+      name: redis
+
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: browser
+  namespace: {{ .Values.crawler_namespace }}
+
+spec:
+  clusterIP: None
+  publishNotReadyAddresses: true
+  selector:
+    role: browser
+
+  ports:
+    - protocol: TCP
+      port: 9223
+      name: browser-api
+
+    - protocol: TCP
+      port: 9222
+      name: browser-ws

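Setting publishNotReadyAddresses: true makes DNS publish records for pods before they pass readiness checks, which the commit notes can speed up name resolution, especially for redis during crawl startup.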
@@ -54,7 +54,7 @@ server {
        set $crawl $2;
        set $num $3;
        set $auth_bearer $arg_auth_bearer;
-        set $svc_suffix "${CRAWLER_SVC_SUFFIX}";
+        set $svc_suffix ".crawler";
        set $fqdn_suffix "${CRAWLER_FQDN_SUFFIX}";

        auth_request /access_check;

@@ -88,7 +88,7 @@ server {

        auth_request /access_check_profiles;

-        proxy_pass http://browser-$browserid-0.browser-$browserid$fqdn_suffix:6080/websockify;
+        proxy_pass http://browser-$browserid-0.browser$fqdn_suffix:6080/websockify;
        proxy_set_header Host "localhost";

        proxy_send_timeout 10m;

@@ -107,7 +107,7 @@ server {

        auth_request /access_check_profiles;

-        proxy_pass http://browser-$browserid-0.browser-$browserid$fqdn_suffix:9223/vnc/;
+        proxy_pass http://browser-$browserid-0.browser$fqdn_suffix:9223/vnc/;
        proxy_set_header Host "localhost";
    }

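With the per-browser service gone, the profile-browser locations now address browser-$browserid-0 through the shared 'browser' service; for screencasting, the fixed '.crawler' suffix presumably combines with the crawler pod name and $fqdn_suffix in the (unchanged) proxy_pass further down, so no per-crawl service name is needed anywhere in the nginx config.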