Merge branch 'nickvsnetworking:service_overhaul' into service_overhaul
zarya authored Oct 5, 2023
2 parents 9585b4c + 09f461a commit 7a8c33c
Showing 12 changed files with 236 additions and 222 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -23,20 +23,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Configurable redis connection (Unix socket or TCP)
- Basic database upgrade support in tools/databaseUpgrade
- PCSCF state storage in ims_subscriber
- (Experimental) Working horizontal scalability

### Changed

- Split logical functions of PyHSS into 6 service processes
- Logtool no longer handles metric processing
- Updated config.yaml
- Gx CCR-T now flushes PGW / IMS data, depending on Called-Station-Id
- Benchmarked lossless operation at ~100 Diameter requests per second, per hssService.

### Fixed

- Memory leaking in diameter.py
- Gx CCA now supports an APN inside a PLMN-based URI
- AVP_Preemption_Capability and AVP_Preemption_Vulnerability now present correctly in all Diameter messages
- Crash when webhook or geored endpoints enabled and no peers defined
- CPU overutilization on all services

### Removed

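The "Gx CCR-T now flushes PGW / IMS data, depending on Called-Station-Id" entry describes per-APN teardown handling. The sketch below is a hypothetical illustration of that kind of dispatch, not PyHSS's actual code; the function name, the `IMS_APNS` set, and the `clear_*` helpers are all assumptions made for illustration.

```python
# Hypothetical sketch only: none of these names come from PyHSS. It illustrates
# the kind of dispatch described by the Gx CCR-T changelog entry above.

IMS_APNS = {"ims"}  # assumption: which APN names are treated as IMS signalling APNs

def handle_ccr_terminate(imsi: str, called_station_id: str, db) -> None:
    """On session termination, clear the serving state that matches the APN."""
    # Called-Station-Id carries the APN, e.g. "ims.mnc001.mcc001.gprs".
    apn = called_station_id.split(".")[0].lower()
    if apn in IMS_APNS:
        db.clear_ims_registration(imsi)   # hypothetical helper: drop P-CSCF / IMS state
    else:
        db.clear_serving_apn(imsi, apn)   # hypothetical helper: drop PGW / PCRF session state
```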
9 changes: 6 additions & 3 deletions config.yaml
@@ -33,9 +33,6 @@ hss:
#The maximum time to wait, in seconds, before disconnecting a client when no data is received.
client_socket_timeout: 120

#Enable benchmarking log output for response times - set to False in production.
enable_benchmarking: False

#The maximum time to wait, in seconds, before disconnecting a client when no data is received.
client_socket_timeout: 300

@@ -71,6 +68,12 @@ hss:
api:
page_size: 200

benchmarking:
# Whether to enable benchmark logging
enabled: True
# How often to report, in seconds. Not all benchmarking supports interval reporting.
reporting_interval: 3600

eir:
imsi_imei_logging: True #Store current IMEI / IMSI pair in backend
no_match_response: 2 #Greylist
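The new `benchmarking` block replaces the old top-level `enable_benchmarking` flag and adds an interval. A minimal sketch of how a service could consume it, assuming a plain PyYAML load of `config.yaml` from the working directory; the `report_benchmarks()` placeholder is an assumption, not PyHSS's own benchmarking code.

```python
import time
import yaml  # PyYAML

# Minimal sketch, assuming config.yaml sits in the working directory.
with open("config.yaml", "r") as f:
    config = yaml.safe_load(f)

benchmarking = config.get("benchmarking", {})
enabled = benchmarking.get("enabled", False)
interval = benchmarking.get("reporting_interval", 3600)

def report_benchmarks():
    # Placeholder: a real service would emit its collected response-time stats here.
    print("benchmark report")

if enabled:
    while True:
        time.sleep(interval)
        report_benchmarks()
```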
37 changes: 20 additions & 17 deletions lib/database.py
@@ -864,13 +864,15 @@ def handleGeored(self, jsonData, operation: str="PATCH", asymmetric: bool=False,
if len(self.config.get('geored', {}).get('endpoints', [])) > 0:
georedDict['body'] = jsonData
georedDict['operation'] = operation
self.redisMessaging.sendMessage(queue=f'geored-{uuid.uuid4()}-{time.time_ns()}', message=json.dumps(georedDict), queueExpiry=120)
georedDict['timestamp'] = time.time_ns()
self.redisMessaging.sendMessage(queue=f'geored', message=json.dumps(georedDict), queueExpiry=120)
if asymmetric:
if len(asymmetricUrls) > 0:
georedDict['body'] = jsonData
georedDict['operation'] = operation
georedDict['timestamp'] = time.time_ns()
georedDict['urls'] = asymmetricUrls
self.redisMessaging.sendMessage(queue=f'asymmetric-geored-{uuid.uuid4()}-{time.time_ns()}', message=json.dumps(georedDict), queueExpiry=120)
self.redisMessaging.sendMessage(queue=f'asymmetric-geored', message=json.dumps(georedDict), queueExpiry=120)
return True

except Exception as E:
@@ -897,7 +899,8 @@ def handleWebhook(self, objectData, operation: str="PATCH"):
webhook['body'] = self.Sanitize_Datetime(objectData)
webhook['headers'] = webhookHeaders
webhook['operation'] = operation
self.redisMessaging.sendMessage(queue=f'webhook-{uuid.uuid4()}-{time.time_ns()}', message=json.dumps(webhook), queueExpiry=120)
webhook['timestamp'] = time.time_ns()
self.redisMessaging.sendMessage(queue=f'webhook', message=json.dumps(webhook), queueExpiry=120)
return True

def Sanitize_Datetime(self, result):
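Both `handleGeored` and `handleWebhook` now publish to fixed queue names (`geored`, `webhook`, `asymmetric-geored`) and carry the timestamp inside the message instead of in a per-message queue name. Below is a sketch of the consumer side of that pattern, assuming the messaging layer is backed by a plain Redis list; the key name and the use of redis-py here are assumptions, not PyHSS's `redisMessaging` API.

```python
import json
import redis  # redis-py

# Assumption: the queue is a Redis list named "geored" on a default TCP Redis.
r = redis.Redis(host="localhost", port=6379)

def consume_geored_queue():
    """Drain the single shared 'geored' queue instead of scanning per-message queues."""
    while True:
        _, raw = r.blpop("geored")          # blocks until a message is pushed
        message = json.loads(raw)
        body = message["body"]
        operation = message["operation"]
        sent_at_ns = message["timestamp"]   # timestamp now travels in the payload
        dispatch_to_peers(body, operation, sent_at_ns)

def dispatch_to_peers(body, operation, sent_at_ns):
    pass  # placeholder for the HTTP fan-out to the configured geored endpoints
```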
@@ -1580,7 +1583,7 @@ def Update_Serving_MME(self, imsi, serving_mme, serving_mme_realm=None, serving_
self.logTool.log(service='Database', level='debug', message="Updating serving MME & Timestamp", redisClient=self.redisMessaging)
result.serving_mme = serving_mme
try:
if serving_mme_timestamp is not None and serving_mme_timestamp is not 'None':
if serving_mme_timestamp != None and serving_mme_timestamp != 'None':
result.serving_mme_timestamp = datetime.strptime(serving_mme_timestamp, '%Y-%m-%dT%H:%M:%SZ')
result.serving_mme_timestamp = result.serving_mme_timestamp.replace(tzinfo=timezone.utc)
serving_mme_timestamp_string = result.serving_mme_timestamp.strftime('%Y-%m-%dT%H:%M:%SZ')
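The comparison change from `is not 'None'` to `!= 'None'` matters because `is` tests object identity, not value; comparing against a string literal with `is` is unreliable and raises a `SyntaxWarning` on Python 3.8+. A quick illustration:

```python
# Identity vs. equality against the sentinel string "None".
timestamp = "".join(["No", "ne"])   # a distinct str object whose value is "None"

print(timestamp == "None")   # True  - value comparison, what the check intends
print(timestamp is "None")   # False here - identity comparison, unreliable,
                             # and a SyntaxWarning on Python 3.8+
```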
@@ -1615,8 +1618,8 @@ def Update_Serving_MME(self, imsi, serving_mme, serving_mme_realm=None, serving_
self.handleGeored({
"imsi": str(imsi),
"serving_mme": result.serving_mme,
"serving_mme_realm": str(result.serving_mme_realm),
"serving_mme_peer": str(result.serving_mme_peer),
"serving_mme_realm": result.serving_mme_realm,
"serving_mme_peer": result.serving_mme_peer,
"serving_mme_timestamp": serving_mme_timestamp_string
})
else:
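Dropping the `str()` wrappers around `serving_mme_realm`, `serving_mme_peer`, and the equivalent fields further down keeps a missing value as a real `None`, which serializes to JSON `null` instead of the literal string `"None"`. A small sketch of the difference, using nothing beyond the standard library:

```python
import json

serving_mme_realm = None  # subscriber currently has no serving MME realm recorded

# Old behaviour: str(None) becomes the string "None", which reaches the peer as
# a truthy, non-null value.
print(json.dumps({"serving_mme_realm": str(serving_mme_realm)}))
# -> {"serving_mme_realm": "None"}

# New behaviour: the value passes through unchanged and serializes as JSON null.
print(json.dumps({"serving_mme_realm": serving_mme_realm}))
# -> {"serving_mme_realm": null}
```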
@@ -1643,7 +1646,7 @@ def Update_Proxy_CSCF(self, imsi, proxy_cscf, pcscf_realm=None, pcscf_peer=None,
result.pcscf = proxy_cscf
result.pcscf_active_session = pcscf_active_session
try:
if pcscf_timestamp is not None and pcscf_timestamp is not 'None':
if pcscf_timestamp != None and pcscf_timestamp != 'None':
result.pcscf_timestamp = datetime.strptime(pcscf_timestamp, '%Y-%m-%dT%H:%M:%SZ')
result.pcscf_timestamp = result.pcscf_timestamp.replace(tzinfo=timezone.utc)
pcscf_timestamp_string = result.pcscf_timestamp.strftime('%Y-%m-%dT%H:%M:%SZ')
@@ -1673,7 +1676,7 @@ def Update_Proxy_CSCF(self, imsi, proxy_cscf, pcscf_realm=None, pcscf_peer=None,
if propagate == True:
if 'IMS' in self.config['geored']['sync_actions'] and self.config['geored']['enabled'] == True:
self.logTool.log(service='Database', level='debug', message="Propagate IMS changes to Geographic PyHSS instances", redisClient=self.redisMessaging)
self.handleGeored({"imsi": str(imsi), "pcscf": result.pcscf, "pcscf_realm": str(result.pcscf_realm), "pcscf_timestamp": pcscf_timestamp_string, "pcscf_peer": str(result.pcscf_peer), "pcscf_active_session": str(pcscf_active_session)})
self.handleGeored({"imsi": str(imsi), "pcscf": result.pcscf, "pcscf_realm": result.pcscf_realm, "pcscf_timestamp": pcscf_timestamp_string, "pcscf_peer": result.pcscf_peer, "pcscf_active_session": pcscf_active_session})
else:
self.logTool.log(service='Database', level='debug', message="Config does not allow sync of IMS events", redisClient=self.redisMessaging)
except Exception as E:
@@ -1698,7 +1701,7 @@ def Update_Serving_CSCF(self, imsi, serving_cscf, scscf_realm=None, scscf_peer=N
serving_cscf = serving_cscf.replace("sip:sip:", "sip:")
result.scscf = serving_cscf
try:
if scscf_timestamp is not None and scscf_timestamp is not 'None':
if scscf_timestamp != None and scscf_timestamp != 'None':
result.scscf_timestamp = datetime.strptime(scscf_timestamp, '%Y-%m-%dT%H:%M:%SZ')
result.scscf_timestamp = result.scscf_timestamp.replace(tzinfo=timezone.utc)
scscf_timestamp_string = result.scscf_timestamp.strftime('%Y-%m-%dT%H:%M:%SZ')
@@ -1727,7 +1730,7 @@ def Update_Serving_CSCF(self, imsi, serving_cscf, scscf_realm=None, scscf_peer=N
if propagate == True:
if 'IMS' in self.config['geored']['sync_actions'] and self.config['geored']['enabled'] == True:
self.logTool.log(service='Database', level='debug', message="Propagate IMS changes to Geographic PyHSS instances", redisClient=self.redisMessaging)
self.handleGeored({"imsi": str(imsi), "scscf": result.scscf, "scscf_realm": str(result.scscf_realm), "scscf_timestamp": scscf_timestamp_string, "scscf_peer": str(result.scscf_peer)})
self.handleGeored({"imsi": str(imsi), "scscf": result.scscf, "scscf_realm": result.scscf_realm, "scscf_timestamp": scscf_timestamp_string, "scscf_peer": result.scscf_peer})
else:
self.logTool.log(service='Database', level='debug', message="Config does not allow sync of IMS events", redisClient=self.redisMessaging)
except Exception as E:
Expand Down Expand Up @@ -1770,7 +1773,7 @@ def Update_Serving_APN(self, imsi, apn, pcrf_session_id, serving_pgw, subscriber
self.logTool.log(service='Database', level='debug', message="APN ID is " + str(apn_id), redisClient=self.redisMessaging)

try:
if serving_pgw_timestamp is not None and serving_pgw_timestamp is not 'None':
if serving_pgw_timestamp != None and serving_pgw_timestamp != 'None':
serving_pgw_timestamp = datetime.strptime(serving_pgw_timestamp, '%Y-%m-%dT%H:%M:%SZ')
serving_pgw_timestamp = serving_pgw_timestamp.replace(tzinfo=timezone.utc)
serving_pgw_timestamp_string = serving_pgw_timestamp.strftime('%Y-%m-%dT%H:%M:%SZ')
@@ -1836,13 +1839,13 @@ def Update_Serving_APN(self, imsi, apn, pcrf_session_id, serving_pgw, subscriber
if 'PCRF' in self.config['geored']['sync_actions'] and self.config['geored']['enabled'] == True:
self.logTool.log(service='Database', level='debug', message="Propagate PCRF changes to Geographic PyHSS instances", redisClient=self.redisMessaging)
self.handleGeored({"imsi": str(imsi),
'serving_apn' : str(apn),
'pcrf_session_id': str(pcrf_session_id),
'serving_pgw': str(serving_pgw),
'serving_pgw_realm': str(serving_pgw_realm),
'serving_pgw_peer': str(serving_pgw_peer),
'serving_apn' : apn,
'pcrf_session_id': pcrf_session_id,
'serving_pgw': serving_pgw,
'serving_pgw_realm': serving_pgw_realm,
'serving_pgw_peer': serving_pgw_peer,
'serving_pgw_timestamp': serving_pgw_timestamp_string,
'subscriber_routing': str(subscriber_routing)
'subscriber_routing': subscriber_routing
})
else:
self.logTool.log(service='Database', level='debug', message="Config does not allow sync of PCRF events", redisClient=self.redisMessaging)