
Merge branch 'master' into skip-creating-duplicate-monitors

pull/746/head
Ross McFarland, 4 years ago, committed by GitHub
commit 1aa78b87a6
10 changed files with 2233 additions and 26 deletions
  1. CHANGELOG.md (+9, -1)
  2. README.md (+1, -0)
  3. octodns/provider/gcore.py (+623, -0)
  4. octodns/provider/mythicbeasts.py (+3, -3)
  5. octodns/provider/ns1.py (+65, -17)
  6. tests/fixtures/gcore-no-changes.json (+245, -0)
  7. tests/fixtures/gcore-records.json (+428, -0)
  8. tests/fixtures/gcore-zone.json (+27, -0)
  9. tests/test_octodns_provider_gcore.py (+672, -0)
  10. tests/test_octodns_provider_ns1.py (+160, -5)

CHANGELOG.md (+9, -1)

@ -4,6 +4,14 @@
* NS1 NA target now includes `SX` and `UM`. If the `NA` continent is in use in
dynamic records, care must be taken to upgrade/downgrade to v0.9.13.
* Ns1Provider now supports a new parameter, shared_notifylist, which results in
all dynamic record monitors using a shared notify list named 'octoDNS NS1
Notify List'. Only newly created record values will use the shared notify
list. It should be safe to enable this functionality, but existing records
will not be converted. Note: Once this option is enabled downgrades to
previous versions of octoDNS are discouraged and may result in undefined
behavior and broken records. See https://github.com/octodns/octodns/pull/749
for related discussion.
## v0.9.13 - 2021-07-18 - Processors Alpha
@ -17,7 +25,7 @@
* Fixes NS1 provider's geotarget limitation of using `NA` continent. Now, when
`NA` is used in geos it considers **all** the countries of `North America`
instead of just `us-east`, `us-west` and `us-central` regions
* `SX` & `UM` country support added to NS1Provider, not yet in the North
America list for backwards compatibility reasons. They will be added in the
next release.
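
To make the new `shared_notifylist` behavior described in the entry above concrete, here is a minimal sketch of enabling it when constructing the provider directly in Python; the provider id, API key, and monitor region are placeholder values, not taken from this commit.

from octodns.provider.ns1 import Ns1Provider

# Minimal sketch: 'ns1', the api_key, and the 'lga' region are placeholders.
provider = Ns1Provider(
    'ns1',
    api_key='placeholder-api-key',
    monitor_regions=['lga'],   # only required when dynamic records are in use
    shared_notifylist=True,    # new monitors share 'octoDNS NS1 Notify List'
)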


README.md (+1, -0)

@ -204,6 +204,7 @@ The above command pulled the existing data out of Route53 and placed the results
| [EtcHostsProvider](/octodns/provider/etc_hosts.py) | | A, AAAA, ALIAS, CNAME | No | |
| [EnvVarSource](/octodns/source/envvar.py) | | TXT | No | read-only environment variable injection |
| [GandiProvider](/octodns/provider/gandi.py) | | A, AAAA, ALIAS, CAA, CNAME, DNAME, MX, NS, PTR, SPF, SRV, SSHFP, TXT | No | |
| [GCoreProvider](/octodns/provider/gcore.py) | | A, AAAA, NS, MX, TXT, SRV, CNAME, PTR | Dynamic | |
| [GoogleCloudProvider](/octodns/provider/googlecloud.py) | google-cloud-dns | A, AAAA, CAA, CNAME, MX, NAPTR, NS, PTR, SPF, SRV, TXT | No | |
| [HetznerProvider](/octodns/provider/hetzner.py) | | A, AAAA, CAA, CNAME, MX, NS, SRV, TXT | No | |
| [MythicBeastsProvider](/octodns/provider/mythicbeasts.py) | Mythic Beasts | A, AAAA, ALIAS, CNAME, MX, NS, SRV, SSHFP, CAA, TXT | No | |
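
The GCoreProvider row added above advertises dynamic record support. Below is a minimal usage sketch; the token and zone name are placeholders, and the equivalent YAML configuration is shown in the provider docstring in gcore.py further down.

from octodns.provider.gcore import GCoreProvider
from octodns.zone import Zone

# Minimal sketch: the token and zone name are placeholder values.
provider = GCoreProvider('gcore', token='XXXXXXXXXXXX')
zone = Zone('unit.tests.', [])
exists = provider.populate(zone)  # True if the zone already exists at G-Core
print(len(zone.records), exists)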


octodns/provider/gcore.py (+623, -0)

@ -0,0 +1,623 @@
#
#
#
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from collections import defaultdict
from requests import Session
import http
import logging
import urllib.parse
from ..record import GeoCodes
from ..record import Record
from .base import BaseProvider
class GCoreClientException(Exception):
def __init__(self, r):
super(GCoreClientException, self).__init__(r.text)
class GCoreClientBadRequest(GCoreClientException):
def __init__(self, r):
super(GCoreClientBadRequest, self).__init__(r)
class GCoreClientNotFound(GCoreClientException):
def __init__(self, r):
super(GCoreClientNotFound, self).__init__(r)
class GCoreClient(object):
ROOT_ZONES = "zones"
def __init__(
self,
log,
api_url,
auth_url,
token=None,
token_type=None,
login=None,
password=None,
):
self.log = log
self._session = Session()
self._api_url = api_url
if token is not None and token_type is not None:
self._session.headers.update(
{"Authorization": "{} {}".format(token_type, token)}
)
elif login is not None and password is not None:
token = self._auth(auth_url, login, password)
self._session.headers.update(
{"Authorization": "Bearer {}".format(token)}
)
else:
raise ValueError("either token or login & password must be set")
def _auth(self, url, login, password):
# we can't use _request here since the API returns 400 when the credentials
# are invalid, which would be logged, and we don't want that
r = self._session.request(
"POST",
self._build_url(url, "auth", "jwt", "login"),
json={"username": login, "password": password},
)
r.raise_for_status()
return r.json()["access"]
def _request(self, method, url, params=None, data=None):
r = self._session.request(
method, url, params=params, json=data, timeout=30.0
)
if r.status_code == http.HTTPStatus.BAD_REQUEST:
self.log.error(
"bad request %r has been sent to %r: %s", data, url, r.text
)
raise GCoreClientBadRequest(r)
elif r.status_code == http.HTTPStatus.NOT_FOUND:
self.log.error("resource %r not found: %s", url, r.text)
raise GCoreClientNotFound(r)
elif r.status_code == http.HTTPStatus.INTERNAL_SERVER_ERROR:
self.log.error("server error no %r to %r: %s", data, url, r.text)
raise GCoreClientException(r)
r.raise_for_status()
return r
def zone(self, zone_name):
return self._request(
"GET", self._build_url(self._api_url, self.ROOT_ZONES, zone_name)
).json()
def zone_create(self, zone_name):
return self._request(
"POST",
self._build_url(self._api_url, self.ROOT_ZONES),
data={"name": zone_name},
).json()
def zone_records(self, zone_name):
rrsets = self._request(
"GET",
"{}".format(
self._build_url(
self._api_url, self.ROOT_ZONES, zone_name, "rrsets"
)
),
params={"all": "true"},
).json()
records = rrsets["rrsets"]
return records
def record_create(self, zone_name, rrset_name, type_, data):
self._request(
"POST", self._rrset_url(zone_name, rrset_name, type_), data=data
)
def record_update(self, zone_name, rrset_name, type_, data):
self._request(
"PUT", self._rrset_url(zone_name, rrset_name, type_), data=data
)
def record_delete(self, zone_name, rrset_name, type_):
self._request("DELETE", self._rrset_url(zone_name, rrset_name, type_))
def _rrset_url(self, zone_name, rrset_name, type_):
return self._build_url(
self._api_url, self.ROOT_ZONES, zone_name, rrset_name, type_
)
@staticmethod
def _build_url(base, *items):
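# Joins a base URL and path segments with single slashes, e.g.
# _build_url("https://dnsapi.gcorelabs.com/v2", "zones", "unit.tests", "rrsets")
# returns "https://dnsapi.gcorelabs.com/v2/zones/unit.tests/rrsets".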
for i in items:
base = base.strip("/") + "/"
base = urllib.parse.urljoin(base, i)
return base
class GCoreProvider(BaseProvider):
"""
GCore provider using API v2.
gcore:
class: octodns.provider.gcore.GCoreProvider
# Your API key
token: XXXXXXXXXXXX
# token_type: APIKey
# or login + password
login: XXXXXXXXXXXX
password: XXXXXXXXXXXX
# auth_url: https://api.gcdn.co
# url: https://dnsapi.gcorelabs.com/v2
# records_per_response: 1
"""
SUPPORTS_GEO = False
SUPPORTS_DYNAMIC = True
SUPPORTS = set(("A", "AAAA", "NS", "MX", "TXT", "SRV", "CNAME", "PTR"))
def __init__(self, id, *args, **kwargs):
token = kwargs.pop("token", None)
token_type = kwargs.pop("token_type", "APIKey")
login = kwargs.pop("login", None)
password = kwargs.pop("password", None)
api_url = kwargs.pop("url", "https://dnsapi.gcorelabs.com/v2")
auth_url = kwargs.pop("auth_url", "https://api.gcdn.co")
self.records_per_response = kwargs.pop("records_per_response", 1)
self.log = logging.getLogger("GCoreProvider[{}]".format(id))
self.log.debug("__init__: id=%s", id)
super(GCoreProvider, self).__init__(id, *args, **kwargs)
self._client = GCoreClient(
self.log,
api_url,
auth_url,
token=token,
token_type=token_type,
login=login,
password=password,
)
def _add_dot_if_need(self, value):
return "{}.".format(value) if not value.endswith(".") else value
def _build_pools(self, record, default_pool_name, value_transform_fn):
defaults = []
geo_sets, pool_idx = dict(), 0
pools = defaultdict(lambda: {"values": []})
for rr in record["resource_records"]:
meta = rr.get("meta", {}) or {}
value = {"value": value_transform_fn(rr["content"][0])}
countries = meta.get("countries", []) or []
continents = meta.get("continents", []) or []
if meta.get("default", False):
pools[default_pool_name]["values"].append(value)
defaults.append(value["value"])
continue
# default is false or missing and there are no countries or continents
elif len(continents) == 0 and len(countries) == 0:
defaults.append(value["value"])
continue
# RRs with the same set of countries and continents are
# combined into a single pool
geo_set = frozenset(
[GeoCodes.country_to_code(cc.upper()) for cc in countries]
) | frozenset(cc.upper() for cc in continents)
if geo_set not in geo_sets:
geo_sets[geo_set] = "pool-{}".format(pool_idx)
pool_idx += 1
pools[geo_sets[geo_set]]["values"].append(value)
return pools, geo_sets, defaults
def _build_rules(self, pools, geo_sets):
rules = []
for name, _ in pools.items():
rule = {"pool": name}
geo_set = next(
(
geo_set
for geo_set, pool_name in geo_sets.items()
if pool_name == name
),
{},
)
if len(geo_set) > 0:
rule["geos"] = list(geo_set)
rules.append(rule)
return sorted(rules, key=lambda x: x["pool"])
def _data_for_dynamic(self, record, value_transform_fn=lambda x: x):
default_pool = "other"
pools, geo_sets, defaults = self._build_pools(
record, default_pool, value_transform_fn
)
if len(pools) == 0:
raise RuntimeError(
"filter is enabled, but no pools where built for {}".format(
record
)
)
# defaults can't be empty, so use first pool values
if len(defaults) == 0:
defaults = [
value_transform_fn(v["value"])
for v in next(iter(pools.values()))["values"]
]
# if at least one default RR was found then set up the fallback for
# the other pools to the default pool
if default_pool in pools:
for pool_name, pool in pools.items():
if pool_name == default_pool:
continue
pool["fallback"] = default_pool
rules = self._build_rules(pools, geo_sets)
return pools, rules, defaults
def _data_for_single(self, _type, record):
return {
"ttl": record["ttl"],
"type": _type,
"value": self._add_dot_if_need(
record["resource_records"][0]["content"][0]
),
}
_data_for_PTR = _data_for_single
def _data_for_CNAME(self, _type, record):
if record.get("filters") is None:
return self._data_for_single(_type, record)
pools, rules, defaults = self._data_for_dynamic(
record, self._add_dot_if_need
)
return {
"ttl": record["ttl"],
"type": _type,
"dynamic": {"pools": pools, "rules": rules},
"value": self._add_dot_if_need(defaults[0]),
}
def _data_for_multiple(self, _type, record):
extra = dict()
if record.get("filters") is not None:
pools, rules, defaults = self._data_for_dynamic(record)
extra = {
"dynamic": {"pools": pools, "rules": rules},
"values": defaults,
}
else:
extra = {
"values": [
rr_value
for resource_record in record["resource_records"]
for rr_value in resource_record["content"]
]
}
return {
"ttl": record["ttl"],
"type": _type,
**extra,
}
_data_for_A = _data_for_multiple
_data_for_AAAA = _data_for_multiple
def _data_for_TXT(self, _type, record):
return {
"ttl": record["ttl"],
"type": _type,
"values": [
rr_value.replace(";", "\\;")
for resource_record in record["resource_records"]
for rr_value in resource_record["content"]
],
}
def _data_for_MX(self, _type, record):
return {
"ttl": record["ttl"],
"type": _type,
"values": [
dict(
preference=preference,
exchange=self._add_dot_if_need(exchange),
)
for preference, exchange in map(
lambda x: x["content"], record["resource_records"]
)
],
}
def _data_for_NS(self, _type, record):
return {
"ttl": record["ttl"],
"type": _type,
"values": [
self._add_dot_if_need(rr_value)
for resource_record in record["resource_records"]
for rr_value in resource_record["content"]
],
}
def _data_for_SRV(self, _type, record):
return {
"ttl": record["ttl"],
"type": _type,
"values": [
dict(
priority=priority,
weight=weight,
port=port,
target=self._add_dot_if_need(target),
)
for priority, weight, port, target in map(
lambda x: x["content"], record["resource_records"]
)
],
}
def zone_records(self, zone):
try:
return self._client.zone_records(zone.name[:-1]), True
except GCoreClientNotFound:
return [], False
def populate(self, zone, target=False, lenient=False):
self.log.debug(
"populate: name=%s, target=%s, lenient=%s",
zone.name,
target,
lenient,
)
values = defaultdict(defaultdict)
records, exists = self.zone_records(zone)
for record in records:
_type = record["type"].upper()
if _type not in self.SUPPORTS:
continue
if self._should_ignore(record):
continue
rr_name = zone.hostname_from_fqdn(record["name"])
values[rr_name][_type] = record
before = len(zone.records)
for name, types in values.items():
for _type, record in types.items():
data_for = getattr(self, "_data_for_{}".format(_type))
record = Record.new(
zone,
name,
data_for(_type, record),
source=self,
lenient=lenient,
)
zone.add_record(record, lenient=lenient)
self.log.info(
"populate: found %s records, exists=%s",
len(zone.records) - before,
exists,
)
return exists
def _should_ignore(self, record):
name = record.get("name", "name-not-defined")
if record.get("filters") is None:
return False
want_filters = 3
filters = record.get("filters", [])
if len(filters) != want_filters:
self.log.info(
"ignore %s has filters and their count is not %d",
name,
want_filters,
)
return True
types = [v.get("type") for v in filters]
for i, want_type in enumerate(["geodns", "default", "first_n"]):
if types[i] != want_type:
self.log.info(
"ignore %s, filters.%d.type is %s, want %s",
name,
i,
types[i],
want_type,
)
return True
limits = [filters[i].get("limit", 1) for i in [1, 2]]
if limits[0] != limits[1]:
self.log.info(
"ignore %s, filters.1.limit (%d) != filters.2.limit (%d)",
name,
limits[0],
limits[1],
)
return True
return False
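# For reference, the filter layout that _should_ignore accepts, and that the
# _params_for_* helpers below emit (with both limits equal to
# records_per_response), looks like:
#   [
#       {"type": "geodns"},
#       {"type": "default", "limit": 1, "strict": False},
#       {"type": "first_n", "limit": 1},
#   ]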
def _params_for_dymanic(self, record):
records = []
default_pool_found = False
default_values = set(
record.values if hasattr(record, "values") else [record.value]
)
for rule in record.dynamic.rules:
meta = dict()
# build meta tags if geos information present
if len(rule.data.get("geos", [])) > 0:
for geo_code in rule.data["geos"]:
geo = GeoCodes.parse(geo_code)
country = geo["country_code"]
continent = geo["continent_code"]
if country is not None:
meta.setdefault("countries", []).append(country)
else:
meta.setdefault("continents", []).append(continent)
else:
meta["default"] = True
pool_values = set()
pool_name = rule.data["pool"]
for value in record.dynamic.pools[pool_name].data["values"]:
v = value["value"]
records.append({"content": [v], "meta": meta})
pool_values.add(v)
default_pool_found |= default_values == pool_values
# if the default values don't match any pool's values, then just add
# them with no meta
if not default_pool_found:
for value in default_values:
records.append({"content": [value]})
return records
def _params_for_single(self, record):
return {
"ttl": record.ttl,
"resource_records": [{"content": [record.value]}],
}
_params_for_PTR = _params_for_single
def _params_for_CNAME(self, record):
if not record.dynamic:
return self._params_for_single(record)
return {
"ttl": record.ttl,
"resource_records": self._params_for_dymanic(record),
"filters": [
{"type": "geodns"},
{
"type": "default",
"limit": self.records_per_response,
"strict": False,
},
{"type": "first_n", "limit": self.records_per_response},
],
}
def _params_for_multiple(self, record):
extra = dict()
if record.dynamic:
extra["resource_records"] = self._params_for_dymanic(record)
extra["filters"] = [
{"type": "geodns"},
{
"type": "default",
"limit": self.records_per_response,
"strict": False,
},
{"type": "first_n", "limit": self.records_per_response},
]
else:
extra["resource_records"] = [
{"content": [value]} for value in record.values
]
return {
"ttl": record.ttl,
**extra,
}
_params_for_A = _params_for_multiple
_params_for_AAAA = _params_for_multiple
def _params_for_NS(self, record):
return {
"ttl": record.ttl,
"resource_records": [
{"content": [value]} for value in record.values
],
}
def _params_for_TXT(self, record):
return {
"ttl": record.ttl,
"resource_records": [
{"content": [value.replace("\\;", ";")]}
for value in record.values
],
}
def _params_for_MX(self, record):
return {
"ttl": record.ttl,
"resource_records": [
{"content": [rec.preference, rec.exchange]}
for rec in record.values
],
}
def _params_for_SRV(self, record):
return {
"ttl": record.ttl,
"resource_records": [
{"content": [rec.priority, rec.weight, rec.port, rec.target]}
for rec in record.values
],
}
def _apply_create(self, change):
self.log.info("creating: %s", change)
new = change.new
data = getattr(self, "_params_for_{}".format(new._type))(new)
self._client.record_create(
new.zone.name[:-1], new.fqdn, new._type, data
)
def _apply_update(self, change):
self.log.info("updating: %s", change)
new = change.new
data = getattr(self, "_params_for_{}".format(new._type))(new)
self._client.record_update(
new.zone.name[:-1], new.fqdn, new._type, data
)
def _apply_delete(self, change):
self.log.info("deleting: %s", change)
existing = change.existing
self._client.record_delete(
existing.zone.name[:-1], existing.fqdn, existing._type
)
def _apply(self, plan):
desired = plan.desired
changes = plan.changes
zone = desired.name[:-1]
self.log.debug(
"_apply: zone=%s, len(changes)=%d", desired.name, len(changes)
)
try:
self._client.zone(zone)
except GCoreClientNotFound:
self.log.info("_apply: no existing zone, trying to create it")
self._client.zone_create(zone)
self.log.info("_apply: zone has been successfully created")
changes.reverse()
for change in changes:
class_name = change.__class__.__name__
getattr(self, "_apply_{}".format(class_name.lower()))(change)
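
To illustrate what _build_pools, _build_rules, and _data_for_dynamic above produce, here is a minimal sketch (trimmed from the gcore-records.json fixture further down) that feeds a dynamic A rrset through _data_for_A; the token is a placeholder and the resulting data dict is shown in the trailing comments.

from octodns.provider.gcore import GCoreProvider

provider = GCoreProvider('gcore', token='placeholder')
rrset = {
    'name': 'geo.unit.tests.',
    'type': 'A',
    'ttl': 300,
    'filters': [
        {'type': 'geodns'},
        {'type': 'default', 'limit': 1, 'strict': False},
        {'type': 'first_n', 'limit': 1},
    ],
    'resource_records': [
        {'content': ['7.7.7.7'], 'meta': {'countries': ['RU']}},
        {'content': ['9.9.9.9'], 'meta': {'continents': ['EU']}},
        {'content': ['10.10.10.10'], 'meta': {'default': True}},
    ],
}
data = provider._data_for_A('A', rrset)
# data['values'] == ['10.10.10.10']  # the default RR
# data['dynamic']['pools'] == {
#     'pool-0': {'values': [{'value': '7.7.7.7'}], 'fallback': 'other'},
#     'pool-1': {'values': [{'value': '9.9.9.9'}], 'fallback': 'other'},
#     'other': {'values': [{'value': '10.10.10.10'}]},
# }
# data['dynamic']['rules'] == [
#     {'pool': 'other'},
#     {'pool': 'pool-0', 'geos': ['EU-RU']},
#     {'pool': 'pool-1', 'geos': ['EU']},
# ]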

octodns/provider/mythicbeasts.py (+3, -3)

@ -70,13 +70,13 @@ class MythicBeastsProvider(BaseProvider):
...
mythicbeasts:
class: octodns.provider.mythicbeasts.MythicBeastsProvider
passwords:
my.domain.: 'password'
passwords:
my.domain.: 'DNS API v1 password'
zones:
my.domain.:
targets:
- mythic
- mythicbeasts
'''
RE_MX = re.compile(r'^(?P<preference>[0-9]+)\s+(?P<exchange>\S+)$',


octodns/provider/ns1.py (+65, -17)

@ -77,8 +77,10 @@ class Ns1Client(object):
self._datafeed = client.datafeed()
self._datasource_id = None
self._feeds_for_monitors = None
self._monitors_cache = None
self._notifylists_cache = None
@property
def datasource_id(self):
@ -121,6 +123,14 @@ class Ns1Client(object):
{m['id']: m for m in self.monitors_list()}
return self._monitors_cache
@property
def notifylists(self):
if self._notifylists_cache is None:
self.log.debug('notifylists: fetching & building')
self._notifylists_cache = \
{l['name']: l for l in self.notifylists_list()}
return self._notifylists_cache
def datafeed_create(self, sourceid, name, config):
ret = self._try(self._datafeed.create, sourceid, name, config)
self.feeds_for_monitors[config['jobid']] = ret['id']
@ -163,10 +173,17 @@ class Ns1Client(object):
return ret
def notifylists_delete(self, nlid):
for name, nl in self.notifylists.items():
if nl['id'] == nlid:
del self._notifylists_cache[name]
break
return self._try(self._notifylists.delete, nlid)
def notifylists_create(self, **body):
return self._try(self._notifylists.create, body)
nl = self._try(self._notifylists.create, body)
# cache it
self.notifylists[nl['name']] = nl
return nl
def notifylists_list(self):
return self._try(self._notifylists.list)
@ -216,6 +233,13 @@ class Ns1Provider(BaseProvider):
# Only required if using dynamic records
monitor_regions:
- lga
# Optional. Default: false. true is recommended, but not the default
# for backwards compatibility reasons. If true, all NS1 monitors will
# use a shared notify list rather than one per record & value
# combination. See CHANGELOG,
# https://github.com/octodns/octodns/blob/master/CHANGELOG.md, for more
# information before enabling this behavior.
shared_notifylist: false
# Optional. Default: None. If set, back off in advance to avoid 429s
# from rate-limiting. Generally this should be set to the number
# of processes or workers hitting the API, e.g. the value of
@ -237,6 +261,7 @@ class Ns1Provider(BaseProvider):
'NS', 'PTR', 'SPF', 'SRV', 'TXT', 'URLFWD'))
ZONE_NOT_FOUND_MESSAGE = 'server error: zone not found'
SHARED_NOTIFYLIST_NAME = 'octoDNS NS1 Notify List'
def _update_filter(self, filter, with_disabled):
if with_disabled:
@ -368,7 +393,8 @@ class Ns1Provider(BaseProvider):
}
def __init__(self, id, api_key, retry_count=4, monitor_regions=None,
parallelism=None, client_config=None, *args, **kwargs):
parallelism=None, client_config=None, shared_notifylist=False,
*args, **kwargs):
self.log = getLogger('Ns1Provider[{}]'.format(id))
self.log.debug('__init__: id=%s, api_key=***, retry_count=%d, '
'monitor_regions=%s, parallelism=%s, client_config=%s',
@ -376,6 +402,7 @@ class Ns1Provider(BaseProvider):
client_config)
super(Ns1Provider, self).__init__(id, *args, **kwargs)
self.monitor_regions = monitor_regions
self.shared_notifylist = shared_notifylist
self._client = Ns1Client(api_key, parallelism, retry_count,
client_config)
@ -888,7 +915,6 @@ class Ns1Provider(BaseProvider):
def _feed_create(self, monitor):
monitor_id = monitor['id']
self.log.debug('_feed_create: monitor=%s', monitor_id)
# TODO: looks like length limit is 64 char
name = '{} - {}'.format(monitor['name'], self._uuid()[:6])
# Create the data feed
@ -902,22 +928,36 @@ class Ns1Provider(BaseProvider):
return feed_id
def _notifylists_find_or_create(self, name):
self.log.debug('_notifylists_find_or_create: name="%s"', name)
try:
nl = self._client.notifylists[name]
self.log.debug('_notifylists_find_or_create: existing=%s',
nl['id'])
except KeyError:
notify_list = [{
'config': {
'sourceid': self._client.datasource_id,
},
'type': 'datafeed',
}]
nl = self._client.notifylists_create(name=name,
notify_list=notify_list)
self.log.debug('_notifylists_find_or_create: created=%s',
nl['id'])
return nl
def _monitor_create(self, monitor):
self.log.debug('_monitor_create: monitor="%s"', monitor['name'])
# Create the notify list
notify_list = [{
'config': {
'sourceid': self._client.datasource_id,
},
'type': 'datafeed',
}]
nl = self._client.notifylists_create(name=monitor['name'],
notify_list=notify_list)
nl_id = nl['id']
self.log.debug('_monitor_create: notify_list=%s', nl_id)
# Find the right notifylist
nl_name = self.SHARED_NOTIFYLIST_NAME \
if self.shared_notifylist else monitor['name']
nl = self._notifylists_find_or_create(nl_name)
# Create the monitor
monitor['notify_list'] = nl_id
monitor['notify_list'] = nl['id']
monitor = self._client.monitors_create(**monitor)
monitor_id = monitor['id']
self.log.debug('_monitor_create: monitor=%s', monitor_id)
@ -1028,7 +1068,13 @@ class Ns1Provider(BaseProvider):
self._client.monitors_delete(monitor_id)
notify_list_id = monitor['notify_list']
self._client.notifylists_delete(notify_list_id)
for nl_name, nl in self._client.notifylists.items():
if nl['id'] == notify_list_id:
# We've found the notify list that might need deleting
if nl['name'] != self.SHARED_NOTIFYLIST_NAME:
# It's not shared so is safe to delete
self._client.notifylists_delete(notify_list_id)
break
def _add_answers_for_pool(self, answers, default_answers, pool_name,
pool_label, pool_answers, pools, priority):
@ -1364,7 +1410,9 @@ class Ns1Provider(BaseProvider):
params, active_monitor_ids = \
getattr(self, '_params_for_{}'.format(_type))(new)
self._client.records_update(zone, domain, _type, **params)
self._monitors_gc(new, active_monitor_ids)
# If we're cleaning up we need to send in the old record since it's the
# one that would have anything that needs cleaning up
self._monitors_gc(change.existing, active_monitor_ids)
def _apply_Delete(self, ns1_zone, change):
existing = change.existing
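
As an aside on the garbage-collection change above: the guarded delete removes per-record notify lists but leaves the shared one alone. A standalone sketch of that shape follows, with plain dicts standing in for the Ns1Client cache; the names and ids are illustrative, not the provider's API.

SHARED = 'octoDNS NS1 Notify List'
cache = {
    SHARED: {'id': 'nl-shared', 'name': SHARED},
    'some record monitor': {'id': 'nl-1', 'name': 'some record monitor'},
}

def gc_notifylist(notify_list_id):
    # Mirrors the loop added to _monitors_gc: find the list by id and only
    # delete it when it is not the shared notify list.
    for name, nl in list(cache.items()):
        if nl['id'] == notify_list_id:
            if nl['name'] != SHARED:
                del cache[name]  # a real client would call notifylists_delete
            break

gc_notifylist('nl-1')       # per-record list: removed
gc_notifylist('nl-shared')  # shared list: left alone
assert list(cache) == [SHARED]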


tests/fixtures/gcore-no-changes.json (+245, -0)

@ -0,0 +1,245 @@
{
"rrsets": [
{
"name": "unit.tests",
"type": "A",
"ttl": 300,
"resource_records": [
{
"content": [
"1.2.3.4"
]
},
{
"content": [
"1.2.3.5"
]
}
]
},
{
"name": "unit.tests",
"type": "NS",
"ttl": 300,
"resource_records": [
{
"content": [
"ns2.gcdn.services"
]
},
{
"content": [
"ns1.gcorelabs.net"
]
}
]
},
{
"name": "_imap._tcp",
"type": "SRV",
"ttl": 600,
"resource_records": [
{
"content": [
0,
0,
0,
"."
]
}
]
},
{
"name": "_pop3._tcp",
"type": "SRV",
"ttl": 600,
"resource_records": [
{
"content": [
0,
0,
0,
"."
]
}
]
},
{
"name": "_srv._tcp",
"type": "SRV",
"ttl": 600,
"resource_records": [
{
"content": [
12,
20,
30,
"foo-2.unit.tests"
]
},
{
"content": [
10,
20,
30,
"foo-1.unit.tests"
]
}
]
},
{
"name": "aaaa.unit.tests",
"type": "AAAA",
"ttl": 600,
"resource_records": [
{
"content": [
"2601:644:500:e210:62f8:1dff:feb8:947a"
]
}
]
},
{
"name": "cname.unit.tests",
"type": "CNAME",
"ttl": 300,
"resource_records": [
{
"content": [
"unit.tests."
]
}
]
},
{
"name": "excluded.unit.tests",
"type": "CNAME",
"ttl": 3600,
"resource_records": [
{
"content": [
"unit.tests."
]
}
]
},
{
"name": "mx.unit.tests",
"type": "MX",
"ttl": 300,
"resource_records": [
{
"content": [
40,
"smtp-1.unit.tests."
]
},
{
"content": [
20,
"smtp-2.unit.tests."
]
},
{
"content": [
30,
"smtp-3.unit.tests."
]
},
{
"content": [
10,
"smtp-4.unit.tests."
]
}
]
},
{
"name": "ptr.unit.tests.",
"type": "PTR",
"ttl": 300,
"resource_records": [
{
"content": [
"foo.bar.com"
]
}
]
},
{
"name": "sub.unit.tests",
"type": "NS",
"ttl": 3600,
"resource_records": [
{
"content": [
"6.2.3.4"
]
},
{
"content": [
"7.2.3.4"
]
}
]
},
{
"name": "txt.unit.tests",
"type": "TXT",
"ttl": 600,
"resource_records": [
{
"content": [
"Bah bah black sheep"
]
},
{
"content": [
"have you any wool."
]
},
{
"content": [
"v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+of/long/string+with+numb3rs"
]
}
]
},
{
"name": "www.unit.tests.",
"type": "A",
"ttl": 300,
"resource_records": [
{
"content": [
"2.2.3.6"
]
}
]
},
{
"name": "www.sub.unit.tests.",
"type": "A",
"ttl": 300,
"resource_records": [
{
"content": [
"2.2.3.6"
]
}
]
},
{
"name": "spf.sub.unit.tests.",
"type": "SPF",
"ttl": 600,
"resource_records": [
{
"content": [
"v=spf1 ip4:192.168.0.1/16-all"
]
}
]
}
]
}

tests/fixtures/gcore-records.json (+428, -0)

@ -0,0 +1,428 @@
{
"rrsets": [
{
"name": "unit.tests",
"type": "A",
"ttl": 300,
"resource_records": [
{
"content": [
"1.2.3.4"
]
}
]
},
{
"name": "unit.tests",
"type": "NS",
"ttl": 300,
"resource_records": [
{
"content": [
"ns2.gcdn.services"
]
},
{
"content": [
"ns1.gcorelabs.net"
]
}
]
},
{
"name": "_imap._tcp",
"type": "SRV",
"ttl": 1200,
"resource_records": [
{
"content": [
0,
0,
0,
"."
]
}
]
},
{
"name": "_pop3._tcp",
"type": "SRV",
"ttl": 1200,
"resource_records": [
{
"content": [
0,
0,
0,
"."
]
}
]
},
{
"name": "_srv._tcp",
"type": "SRV",
"ttl": 1200,
"resource_records": [
{
"content": [
12,
20,
30,
"foo-2.unit.tests."
]
},
{
"content": [
10,
20,
30,
"foo-1.unit.tests."
]
}
]
},
{
"name": "aaaa.unit.tests",
"type": "AAAA",
"ttl": 600,
"resource_records": [
{
"content": [
"2601:644:500:e210:62f8:1dff:feb8:947a"
]
}
]
},
{
"name": "cname.unit.tests",
"type": "CNAME",
"ttl": 300,
"resource_records": [
{
"content": [
"unit.tests."
]
}
]
},
{
"name": "mx.unit.tests",
"type": "MX",
"ttl": 600,
"resource_records": [
{
"content": [
40,
"smtp-1.unit.tests."
]
},
{
"content": [
20,
"smtp-2.unit.tests."
]
}
]
},
{
"name": "ptr.unit.tests.",
"type": "PTR",
"ttl": 300,
"resource_records": [
{
"content": [
"foo.bar.com"
]
}
]
},
{
"name": "sub.unit.tests",
"type": "NS",
"ttl": 300,
"resource_records": [
{
"content": [
"6.2.3.4"
]
},
{
"content": [
"7.2.3.4"
]
}
]
},
{
"name": "txt.unit.tests",
"type": "TXT",
"ttl": 300,
"resource_records": [
{
"content": [
"\"Bah bah black sheep\""
]
},
{
"content": [
"\"have you any wool.\""
]
},
{
"content": [
"\"v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+of/long/string+with+numb3rs\""
]
}
]
},
{
"name": "www.unit.tests.",
"type": "A",
"ttl": 300,
"resource_records": [
{
"content": [
"2.2.3.6"
]
}
]
},
{
"name": "www.sub.unit.tests.",
"type": "A",
"ttl": 300,
"resource_records": [
{
"content": [
"2.2.3.6"
]
}
]
},
{
"name": "geo-A-single.unit.tests.",
"type": "A",
"ttl": 300,
"filters": [
{
"type": "geodns"
},
{
"limit": 1,
"strict": false,
"type": "default"
},
{
"limit": 1,
"type": "first_n"
}
],
"resource_records": [
{
"content": [
"7.7.7.7"
],
"meta": {
"countries": [
"RU"
]
}
},
{
"content": [
"8.8.8.8"
],
"meta": {
"countries": [
"RU"
]
}
},
{
"content": [
"9.9.9.9"
],
"meta": {
"continents": [
"EU"
]
}
},
{
"content": [
"10.10.10.10"
],
"meta": {
"default": true
}
}
]
},
{
"name": "geo-no-def.unit.tests.",
"type": "A",
"ttl": 300,
"filters": [
{
"type": "geodns"
},
{
"limit": 1,
"strict": false,
"type": "default"
},
{
"limit": 1,
"type": "first_n"
}
],
"resource_records": [
{
"content": [
"7.7.7.7"
],
"meta": {
"countries": [
"RU"
]
}
}
]
},
{
"name": "geo-CNAME.unit.tests.",
"type": "CNAME",
"ttl": 300,
"filters": [
{
"type": "geodns"
},
{
"limit": 1,
"strict": false,
"type": "default"
},
{
"limit": 1,
"type": "first_n"
}
],
"resource_records": [
{
"content": [
"ru-1.unit.tests"
],
"meta": {
"countries": [
"RU"
]
}
},
{
"content": [
"ru-2.unit.tests"
],
"meta": {
"countries": [
"RU"
]
}
},
{
"content": [
"eu.unit.tests"
],
"meta": {
"continents": [
"EU"
]
}
},
{
"content": [
"any.unit.tests."
],
"meta": {
"default": true
}
}
]
},
{
"name": "geo-ignore-len-filters.unit.tests.",
"type": "A",
"ttl": 300,
"filters": [
{
"limit": 1,
"type": "first_n"
},
{
"limit": 1,
"strict": false,
"type": "default"
}
],
"resource_records": [
{
"content": [
"7.7.7.7"
]
}
]
},
{
"name": "geo-ignore-types.unit.tests.",
"type": "A",
"ttl": 300,
"filters": [
{
"type": "geodistance"
},
{
"limit": 1,
"type": "first_n"
},
{
"limit": 1,
"strict": false,
"type": "default"
}
],
"resource_records": [
{
"content": [
"7.7.7.7"
]
}
]
},
{
"name": "geo-ignore-limits.unit.tests.",
"type": "A",
"ttl": 300,
"filters": [
{
"type": "geodns"
},
{
"limit": 2,
"strict": false,
"type": "default"
},
{
"limit": 1,
"type": "first_n"
}
],
"resource_records": [
{
"content": [
"7.7.7.7"
]
}
]
}
]
}

tests/fixtures/gcore-zone.json (+27, -0)

@ -0,0 +1,27 @@
{
"id": 27757,
"name": "unit.test",
"nx_ttl": 300,
"retry": 5400,
"refresh": 0,
"expiry": 1209600,
"contact": "support@gcorelabs.com",
"serial": 1614752868,
"primary_server": "ns1.gcorelabs.net",
"records": [
{
"id": 12419,
"name": "unit.test",
"type": "ns",
"ttl": 300,
"short_answers": [
"[ns2.gcdn.services]",
"[ns1.gcorelabs.net]"
]
}
],
"dns_servers": [
"ns1.gcorelabs.net",
"ns2.gcdn.services"
]
}

tests/test_octodns_provider_gcore.py (+672, -0)

@ -0,0 +1,672 @@
#
#
#
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from mock import Mock, call
from os.path import dirname, join
from requests_mock import ANY, mock as requests_mock
from six import text_type
from unittest import TestCase
from octodns.record import Record, Update, Delete, Create
from octodns.provider.gcore import (
GCoreProvider,
GCoreClientBadRequest,
GCoreClientNotFound,
GCoreClientException,
)
from octodns.provider.yaml import YamlProvider
from octodns.zone import Zone
class TestGCoreProvider(TestCase):
expected = Zone("unit.tests.", [])
source = YamlProvider("test", join(dirname(__file__), "config"))
source.populate(expected)
default_filters = [
{"type": "geodns"},
{
"type": "default",
"limit": 1,
"strict": False,
},
{"type": "first_n", "limit": 1},
]
def test_populate(self):
provider = GCoreProvider("test_id", token="token")
# TC: 400 - Bad Request.
with requests_mock() as mock:
mock.get(ANY, status_code=400, text='{"error":"bad body"}')
with self.assertRaises(GCoreClientBadRequest) as ctx:
zone = Zone("unit.tests.", [])
provider.populate(zone)
self.assertIn('"error":"bad body"', text_type(ctx.exception))
# TC: 404 - Not Found.
with requests_mock() as mock:
mock.get(
ANY, status_code=404, text='{"error":"zone is not found"}'
)
with self.assertRaises(GCoreClientNotFound) as ctx:
zone = Zone("unit.tests.", [])
provider._client.zone(zone.name)
self.assertIn(
'"error":"zone is not found"', text_type(ctx.exception)
)
# TC: General error
with requests_mock() as mock:
mock.get(ANY, status_code=500, text="Things caught fire")
with self.assertRaises(GCoreClientException) as ctx:
zone = Zone("unit.tests.", [])
provider.populate(zone)
self.assertEqual("Things caught fire", text_type(ctx.exception))
# TC: No credentials or token error
with requests_mock() as mock:
with self.assertRaises(ValueError) as ctx:
GCoreProvider("test_id")
self.assertEqual(
"either token or login & password must be set",
text_type(ctx.exception),
)
# TC: Auth with login password
with requests_mock() as mock:
def match_body(request):
return {"username": "foo", "password": "bar"} == request.json()
auth_url = "http://api/auth/jwt/login"
mock.post(
auth_url,
additional_matcher=match_body,
status_code=200,
json={"access": "access"},
)
providerPassword = GCoreProvider(
"test_id",
url="http://dns",
auth_url="http://api",
login="foo",
password="bar",
)
assert mock.called
# make sure token passed in header
zone_rrset_url = "http://dns/zones/unit.tests/rrsets?all=true"
mock.get(
zone_rrset_url,
request_headers={"Authorization": "Bearer access"},
status_code=404,
)
zone = Zone("unit.tests.", [])
assert not providerPassword.populate(zone)
# TC: No diffs == no changes
with requests_mock() as mock:
base = "https://dnsapi.gcorelabs.com/v2/zones/unit.tests/rrsets"
with open("tests/fixtures/gcore-no-changes.json") as fh:
mock.get(base, text=fh.read())
zone = Zone("unit.tests.", [])
provider.populate(zone)
self.assertEqual(14, len(zone.records))
self.assertEqual(
{
"",
"_imap._tcp",
"_pop3._tcp",
"_srv._tcp",
"aaaa",
"cname",
"excluded",
"mx",
"ptr",
"sub",
"txt",
"www",
"www.sub",
},
{r.name for r in zone.records},
)
changes = self.expected.changes(zone, provider)
self.assertEqual(0, len(changes))
# TC: 3 create (dynamic) + 1 removed + 7 modified
with requests_mock() as mock:
base = "https://dnsapi.gcorelabs.com/v2/zones/unit.tests/rrsets"
with open("tests/fixtures/gcore-records.json") as fh:
mock.get(base, text=fh.read())
zone = Zone("unit.tests.", [])
provider.populate(zone)
self.assertEqual(16, len(zone.records))
changes = self.expected.changes(zone, provider)
self.assertEqual(11, len(changes))
self.assertEqual(
3, len([c for c in changes if isinstance(c, Create)])
)
self.assertEqual(
1, len([c for c in changes if isinstance(c, Delete)])
)
self.assertEqual(
7, len([c for c in changes if isinstance(c, Update)])
)
# TC: no pools can be built
with requests_mock() as mock:
base = "https://dnsapi.gcorelabs.com/v2/zones/unit.tests/rrsets"
mock.get(
base,
json={
"rrsets": [
{
"name": "unit.tests.",
"type": "A",
"ttl": 300,
"filters": self.default_filters,
"resource_records": [{"content": ["7.7.7.7"]}],
}
]
},
)
zone = Zone("unit.tests.", [])
with self.assertRaises(RuntimeError) as ctx:
provider.populate(zone)
self.assertTrue(
str(ctx.exception).startswith(
"filter is enabled, but no pools where built for"
),
"{} - is not start from desired text".format(ctx.exception),
)
def test_apply(self):
provider = GCoreProvider("test_id", url="http://api", token="token")
# TC: Zone does not exist but can be created.
with requests_mock() as mock:
mock.get(
ANY, status_code=404, text='{"error":"zone is not found"}'
)
mock.post(ANY, status_code=200, text='{"id":1234}')
plan = provider.plan(self.expected)
provider.apply(plan)
# TC: Zone does not exist and can't be created.
with requests_mock() as mock:
mock.get(
ANY, status_code=404, text='{"error":"zone is not found"}'
)
mock.post(
ANY,
status_code=400,
text='{"error":"parent zone is already'
' occupied by another client"}',
)
with self.assertRaises(
(GCoreClientNotFound, GCoreClientBadRequest)
) as ctx:
plan = provider.plan(self.expected)
provider.apply(plan)
self.assertIn(
"parent zone is already occupied by another client",
text_type(ctx.exception),
)
resp = Mock()
resp.json = Mock()
provider._client._request = Mock(return_value=resp)
with open("tests/fixtures/gcore-zone.json") as fh:
zone = fh.read()
# non-existent domain
resp.json.side_effect = [
GCoreClientNotFound(resp), # no zone in populate
GCoreClientNotFound(resp), # no domain during apply
zone,
]
plan = provider.plan(self.expected)
# TC: create all
self.assertEqual(13, len(plan.changes))
self.assertEqual(13, provider.apply(plan))
self.assertFalse(plan.exists)
provider._client._request.assert_has_calls(
[
call(
"GET",
"http://api/zones/unit.tests/rrsets",
params={"all": "true"},
),
call("GET", "http://api/zones/unit.tests"),
call("POST", "http://api/zones", data={"name": "unit.tests"}),
call(
"POST",
"http://api/zones/unit.tests/www.sub.unit.tests./A",
data={
"ttl": 300,
"resource_records": [{"content": ["2.2.3.6"]}],
},
),
call(
"POST",
"http://api/zones/unit.tests/www.unit.tests./A",
data={
"ttl": 300,
"resource_records": [{"content": ["2.2.3.6"]}],
},
),
call(
"POST",
"http://api/zones/unit.tests/txt.unit.tests./TXT",
data={
"ttl": 600,
"resource_records": [
{"content": ["Bah bah black sheep"]},
{"content": ["have you any wool."]},
{
"content": [
"v=DKIM1;k=rsa;s=email;h=sha256;p=A/kinda+"
"of/long/string+with+numb3rs"
]
},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/sub.unit.tests./NS",
data={
"ttl": 3600,
"resource_records": [
{"content": ["6.2.3.4."]},
{"content": ["7.2.3.4."]},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/ptr.unit.tests./PTR",
data={
"ttl": 300,
"resource_records": [
{"content": ["foo.bar.com."]},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/mx.unit.tests./MX",
data={
"ttl": 300,
"resource_records": [
{"content": [10, "smtp-4.unit.tests."]},
{"content": [20, "smtp-2.unit.tests."]},
{"content": [30, "smtp-3.unit.tests."]},
{"content": [40, "smtp-1.unit.tests."]},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/excluded.unit.tests./CNAME",
data={
"ttl": 3600,
"resource_records": [{"content": ["unit.tests."]}],
},
),
call(
"POST",
"http://api/zones/unit.tests/cname.unit.tests./CNAME",
data={
"ttl": 300,
"resource_records": [{"content": ["unit.tests."]}],
},
),
call(
"POST",
"http://api/zones/unit.tests/aaaa.unit.tests./AAAA",
data={
"ttl": 600,
"resource_records": [
{
"content": [
"2601:644:500:e210:62f8:1dff:feb8:947a"
]
}
],
},
),
call(
"POST",
"http://api/zones/unit.tests/_srv._tcp.unit.tests./SRV",
data={
"ttl": 600,
"resource_records": [
{"content": [10, 20, 30, "foo-1.unit.tests."]},
{"content": [12, 20, 30, "foo-2.unit.tests."]},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/_pop3._tcp.unit.tests./SRV",
data={
"ttl": 600,
"resource_records": [{"content": [0, 0, 0, "."]}],
},
),
call(
"POST",
"http://api/zones/unit.tests/_imap._tcp.unit.tests./SRV",
data={
"ttl": 600,
"resource_records": [{"content": [0, 0, 0, "."]}],
},
),
call(
"POST",
"http://api/zones/unit.tests/unit.tests./A",
data={
"ttl": 300,
"resource_records": [
{"content": ["1.2.3.4"]},
{"content": ["1.2.3.5"]},
],
},
),
]
)
# expected number of total calls
self.assertEqual(16, provider._client._request.call_count)
# TC: delete 1 and update 1
provider._client._request.reset_mock()
provider._client.zone_records = Mock(
return_value=[
{
"name": "www",
"ttl": 300,
"type": "A",
"resource_records": [{"content": ["1.2.3.4"]}],
},
{
"name": "ttl",
"ttl": 600,
"type": "A",
"resource_records": [{"content": ["3.2.3.4"]}],
},
]
)
# Domain exists, we don't care about return
resp.json.side_effect = ["{}"]
wanted = Zone("unit.tests.", [])
wanted.add_record(
Record.new(
wanted, "ttl", {"ttl": 300, "type": "A", "value": "3.2.3.4"}
)
)
plan = provider.plan(wanted)
self.assertTrue(plan.exists)
self.assertEqual(2, len(plan.changes))
self.assertEqual(2, provider.apply(plan))
provider._client._request.assert_has_calls(
[
call(
"DELETE", "http://api/zones/unit.tests/www.unit.tests./A"
),
call(
"PUT",
"http://api/zones/unit.tests/ttl.unit.tests./A",
data={
"ttl": 300,
"resource_records": [{"content": ["3.2.3.4"]}],
},
),
]
)
# TC: create dynamics
provider._client._request.reset_mock()
provider._client.zone_records = Mock(return_value=[])
# Domain exists, we don't care about return
resp.json.side_effect = ["{}"]
wanted = Zone("unit.tests.", [])
wanted.add_record(
Record.new(
wanted,
"geo-simple",
{
"ttl": 300,
"type": "A",
"value": "3.3.3.3",
"dynamic": {
"pools": {
"pool-1": {
"fallback": "other",
"values": [
{"value": "1.1.1.1"},
{"value": "1.1.1.2"},
],
},
"pool-2": {
"fallback": "other",
"values": [
{"value": "2.2.2.1"},
],
},
"other": {"values": [{"value": "3.3.3.3"}]},
},
"rules": [
{"pool": "pool-1", "geos": ["EU-RU"]},
{"pool": "pool-2", "geos": ["EU"]},
{"pool": "other"},
],
},
},
),
)
wanted.add_record(
Record.new(
wanted,
"geo-defaults",
{
"ttl": 300,
"type": "A",
"value": "3.2.3.4",
"dynamic": {
"pools": {
"pool-1": {
"values": [
{"value": "2.2.2.1"},
],
},
},
"rules": [
{"pool": "pool-1", "geos": ["EU"]},
],
},
},
),
)
wanted.add_record(
Record.new(
wanted,
"cname-smpl",
{
"ttl": 300,
"type": "CNAME",
"value": "en.unit.tests.",
"dynamic": {
"pools": {
"pool-1": {
"fallback": "other",
"values": [
{"value": "ru-1.unit.tests."},
{"value": "ru-2.unit.tests."},
],
},
"pool-2": {
"fallback": "other",
"values": [
{"value": "eu.unit.tests."},
],
},
"other": {"values": [{"value": "en.unit.tests."}]},
},
"rules": [
{"pool": "pool-1", "geos": ["EU-RU"]},
{"pool": "pool-2", "geos": ["EU"]},
{"pool": "other"},
],
},
},
),
)
wanted.add_record(
Record.new(
wanted,
"cname-dflt",
{
"ttl": 300,
"type": "CNAME",
"value": "en.unit.tests.",
"dynamic": {
"pools": {
"pool-1": {
"values": [
{"value": "eu.unit.tests."},
],
},
},
"rules": [
{"pool": "pool-1", "geos": ["EU"]},
],
},
},
),
)
plan = provider.plan(wanted)
self.assertTrue(plan.exists)
self.assertEqual(4, len(plan.changes))
self.assertEqual(4, provider.apply(plan))
provider._client._request.assert_has_calls(
[
call(
"POST",
"http://api/zones/unit.tests/geo-simple.unit.tests./A",
data={
"ttl": 300,
"filters": self.default_filters,
"resource_records": [
{
"content": ["1.1.1.1"],
"meta": {"countries": ["RU"]},
},
{
"content": ["1.1.1.2"],
"meta": {"countries": ["RU"]},
},
{
"content": ["2.2.2.1"],
"meta": {"continents": ["EU"]},
},
{
"content": ["3.3.3.3"],
"meta": {"default": True},
},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/geo-defaults.unit.tests./A",
data={
"ttl": 300,
"filters": self.default_filters,
"resource_records": [
{
"content": ["2.2.2.1"],
"meta": {"continents": ["EU"]},
},
{
"content": ["3.2.3.4"],
},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/cname-smpl.unit.tests./CNAME",
data={
"ttl": 300,
"filters": self.default_filters,
"resource_records": [
{
"content": ["ru-1.unit.tests."],
"meta": {"countries": ["RU"]},
},
{
"content": ["ru-2.unit.tests."],
"meta": {"countries": ["RU"]},
},
{
"content": ["eu.unit.tests."],
"meta": {"continents": ["EU"]},
},
{
"content": ["en.unit.tests."],
"meta": {"default": True},
},
],
},
),
call(
"POST",
"http://api/zones/unit.tests/cname-dflt.unit.tests./CNAME",
data={
"ttl": 300,
"filters": self.default_filters,
"resource_records": [
{
"content": ["eu.unit.tests."],
"meta": {"continents": ["EU"]},
},
{
"content": ["en.unit.tests."],
},
],
},
),
]
)

tests/test_octodns_provider_ns1.py (+160, -5)

@ -768,12 +768,70 @@ class TestNs1ProviderDynamic(TestCase):
monitor = {
'name': 'test monitor',
}
provider._client._notifylists_cache = {}
monitor_id, feed_id = provider._monitor_create(monitor)
self.assertEquals('mon-id', monitor_id)
self.assertEquals('feed-id', feed_id)
monitors_create_mock.assert_has_calls([call(name='test monitor',
notify_list='nl-id')])
@patch('octodns.provider.ns1.Ns1Provider._feed_create')
@patch('octodns.provider.ns1.Ns1Client.monitors_create')
@patch('octodns.provider.ns1.Ns1Client._try')
def test_monitor_create_shared_notifylist(self, try_mock,
monitors_create_mock,
feed_create_mock):
provider = Ns1Provider('test', 'api-key', shared_notifylist=True)
# pre-fill caches to avoid extraneous calls (things we're testing
# elsewhere)
provider._client._datasource_id = 'foo'
provider._client._feeds_for_monitors = {}
# First time we'll need to create the shared list
provider._client._notifylists_cache = {}
try_mock.reset_mock()
monitors_create_mock.reset_mock()
feed_create_mock.reset_mock()
try_mock.side_effect = [{
'id': 'nl-id',
'name': provider.SHARED_NOTIFYLIST_NAME,
}]
monitors_create_mock.side_effect = [{
'id': 'mon-id',
}]
feed_create_mock.side_effect = ['feed-id']
monitor = {
'name': 'test monitor',
}
monitor_id, feed_id = provider._monitor_create(monitor)
self.assertEquals('mon-id', monitor_id)
self.assertEquals('feed-id', feed_id)
monitors_create_mock.assert_has_calls([call(name='test monitor',
notify_list='nl-id')])
try_mock.assert_called_once()
# The shared notifylist should be cached now
self.assertEquals([provider.SHARED_NOTIFYLIST_NAME],
list(provider._client._notifylists_cache.keys()))
# Second time we'll use the cached version
try_mock.reset_mock()
monitors_create_mock.reset_mock()
feed_create_mock.reset_mock()
monitors_create_mock.side_effect = [{
'id': 'mon-id',
}]
feed_create_mock.side_effect = ['feed-id']
monitor = {
'name': 'test monitor',
}
monitor_id, feed_id = provider._monitor_create(monitor)
self.assertEquals('mon-id', monitor_id)
self.assertEquals('feed-id', feed_id)
monitors_create_mock.assert_has_calls([call(name='test monitor',
notify_list='nl-id')])
try_mock.assert_not_called()
def test_monitor_gen(self):
provider = Ns1Provider('test', 'api-key')
@ -986,6 +1044,12 @@ class TestNs1ProviderDynamic(TestCase):
'notify_list': 'nl-id',
}
}]
provider._client._notifylists_cache = {
'not shared': {
'id': 'nl-id',
'name': 'not shared',
}
}
provider._monitors_gc(record)
monitors_for_mock.assert_has_calls([call(record)])
datafeed_delete_mock.assert_has_calls([call('foo', 'feed-id')])
@ -1025,12 +1089,75 @@ class TestNs1ProviderDynamic(TestCase):
'notify_list': 'nl-id2',
},
}]
provider._client._notifylists_cache = {
'not shared': {
'id': 'nl-id',
'name': 'not shared',
},
'not shared 2': {
'id': 'nl-id2',
'name': 'not shared 2',
}
}
provider._monitors_gc(record, {'mon-id'})
monitors_for_mock.assert_has_calls([call(record)])
datafeed_delete_mock.assert_not_called()
monitors_delete_mock.assert_has_calls([call('mon-id2')])
notifylists_delete_mock.assert_has_calls([call('nl-id2')])
# Non-active monitor w/o a notifylist, generally shouldn't happen, but
# code should handle it just in case someone gets clicky in the UI
monitors_for_mock.reset_mock()
datafeed_delete_mock.reset_mock()
monitors_delete_mock.reset_mock()
notifylists_delete_mock.reset_mock()
monitors_for_mock.side_effect = [{
'y': {
'id': 'mon-id2',
'notify_list': 'nl-id2',
},
}]
provider._client._notifylists_cache = {
'not shared a': {
'id': 'nl-ida',
'name': 'not shared a',
},
'not shared b': {
'id': 'nl-idb',
'name': 'not shared b',
}
}
provider._monitors_gc(record, {'mon-id'})
monitors_for_mock.assert_has_calls([call(record)])
datafeed_delete_mock.assert_not_called()
monitors_delete_mock.assert_has_calls([call('mon-id2')])
notifylists_delete_mock.assert_not_called()
# Non-active monitor with a shared notifylist, monitor deleted, but
# notifylist is left alone
provider.shared_notifylist = True
monitors_for_mock.reset_mock()
datafeed_delete_mock.reset_mock()
monitors_delete_mock.reset_mock()
notifylists_delete_mock.reset_mock()
monitors_for_mock.side_effect = [{
'y': {
'id': 'mon-id2',
'notify_list': 'shared',
},
}]
provider._client._notifylists_cache = {
'shared': {
'id': 'shared',
'name': provider.SHARED_NOTIFYLIST_NAME,
},
}
provider._monitors_gc(record, {'mon-id'})
monitors_for_mock.assert_has_calls([call(record)])
datafeed_delete_mock.assert_not_called()
monitors_delete_mock.assert_has_calls([call('mon-id2')])
notifylists_delete_mock.assert_not_called()
@patch('octodns.provider.ns1.Ns1Provider._monitor_sync')
@patch('octodns.provider.ns1.Ns1Provider._monitors_for')
def test_params_for_dynamic_region_only(self, monitors_for_mock,
@ -2329,17 +2456,22 @@ class TestNs1Client(TestCase):
notifylists_list_mock.reset_mock()
notifylists_create_mock.reset_mock()
notifylists_delete_mock.reset_mock()
notifylists_create_mock.side_effect = ['bar']
notifylists_list_mock.side_effect = [{}]
expected = {
'id': 'nl-id',
'name': 'bar',
}
notifylists_create_mock.side_effect = [expected]
notify_list = [{
'config': {
'sourceid': 'foo',
},
'type': 'datafeed',
}]
nl = client.notifylists_create(name='some name',
notify_list=notify_list)
self.assertEquals('bar', nl)
notifylists_list_mock.assert_not_called()
got = client.notifylists_create(name='some name',
notify_list=notify_list)
self.assertEquals(expected, got)
notifylists_list_mock.assert_called_once()
notifylists_create_mock.assert_has_calls([
call({'name': 'some name', 'notify_list': notify_list})
])
@ -2353,6 +2485,29 @@ class TestNs1Client(TestCase):
notifylists_create_mock.assert_not_called()
notifylists_delete_mock.assert_has_calls([call('nlid')])
# Delete again, this time with a cache item that needs to be cleaned out
# and another that needs to be ignored
notifylists_list_mock.reset_mock()
notifylists_create_mock.reset_mock()
notifylists_delete_mock.reset_mock()
client._notifylists_cache = {
'another': {
'id': 'notid',
'name': 'another',
},
# This one comes 2nd on purpose
'the-one': {
'id': 'nlid',
'name': 'the-one',
},
}
client.notifylists_delete('nlid')
notifylists_list_mock.assert_not_called()
notifylists_create_mock.assert_not_called()
notifylists_delete_mock.assert_has_calls([call('nlid')])
# Only another left
self.assertEquals(['another'], list(client._notifylists_cache.keys()))
notifylists_list_mock.reset_mock()
notifylists_create_mock.reset_mock()
notifylists_delete_mock.reset_mock()

