Update http source for required changes related to python3 #130

Merged: 3 commits, Nov 4, 2021

11 changes: 8 additions & 3 deletions nss_cache/sources/httpsource.py
@@ -24,6 +24,7 @@
 import pycurl
 import time
 from urllib.parse import urljoin
+from io import StringIO
 
 from nss_cache import error
 from nss_cache.maps import automount
@@ -267,7 +268,8 @@ def GetUpdates(self, source, url, since):
         while retry_count < source.conf['retry_max']:
             try:
                 source.log.debug('fetching %s', url)
-                (resp_code, headers, body) = curl.CurlFetch(url, conn, self.log)
+                (resp_code, headers,
+                 body_bytes) = curl.CurlFetch(url, conn, self.log)
                 self.log.debug('response code: %s', resp_code)
             finally:
                 if resp_code < 400:
@@ -302,10 +304,13 @@ def GetUpdates(self, source, url, since):
 
         # curl (on Ubuntu hardy at least) will handle gzip, but not bzip2
         try:
-            response = bz2.decompress(body)
+            body_bytes = bz2.decompress(body_bytes)
             self.log.debug('bzip encoding found')
         except IOError:
-            response = body
             self.log.debug('bzip encoding not found')
+
+        # Wrap in a stringIO so that it can be looped on by newlines in the parser
+        response = StringIO(body_bytes.decode('utf-8'))
+
         data_map = self.GetMap(cache_info=response)
         if http_ts_string:
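For context, under Python 3 pycurl returns the response body as bytes; this hunk decompresses bzip2 payloads when present, decodes the bytes once, and wraps the text in a StringIO so the map parser can iterate it line by line. A minimal standalone sketch of that flow (not the PR's code; the helper name and sample record are illustrative):

    import bz2
    from io import StringIO

    def body_to_line_stream(body_bytes):
        """Turn a fetched bytes payload into a line-iterable text stream."""
        try:
            # bzip2-compressed payloads are transparently decompressed.
            body_bytes = bz2.decompress(body_bytes)
        except IOError:
            # Plain payloads raise IOError/OSError here and pass through
            # unchanged, matching the except clause in GetUpdates above.
            pass
        # Decode once, then wrap in StringIO so parsers can loop over lines.
        return StringIO(body_bytes.decode('utf-8'))

    for line in body_to_line_stream(b'root:x:0:0:root:/root:/bin/bash\n'):
        print(line.rstrip())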
57 changes: 56 additions & 1 deletion nss_cache/sources/httpsource_test.py
@@ -17,10 +17,12 @@
 
 __author__ = 'blaedd@google.com (David MacKinnon)'
 
+import base64
 import time
 import unittest
 import pycurl
 from mox3 import mox
+from io import BytesIO
 
 from nss_cache import error
 from nss_cache.maps import automount
@@ -32,6 +34,7 @@
 
 from nss_cache.sources import httpsource
 from nss_cache.util import file_formats
+from nss_cache.util import curl
 
 
 class TestHttpSource(unittest.TestCase):
@@ -245,7 +248,7 @@ def testCreateMap(self):
         self.assertTrue(isinstance(self.updater.CreateMap(), passwd.PasswdMap))
 
 
-class TestShadowUpdateGetter(unittest.TestCase):
+class TestShadowUpdateGetter(mox.MoxTestBase):
 
     def setUp(self):
         super(TestShadowUpdateGetter, self).setUp()
@@ -260,6 +263,58 @@ def testGetParser(self):
     def testCreateMap(self):
         self.assertTrue(isinstance(self.updater.CreateMap(), shadow.ShadowMap))
 
+    def testShadowGetUpdatesWithContent(self):
+        mock_conn = self.mox.CreateMockAnything()
+        mock_conn.setopt(mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes()
+        mock_conn.getinfo(pycurl.INFO_FILETIME).AndReturn(-1)
+
+        self.mox.StubOutWithMock(pycurl, 'Curl')
+        pycurl.Curl().AndReturn(mock_conn)
+
+        self.mox.StubOutWithMock(curl, 'CurlFetch')
+
+        curl.CurlFetch('https://TEST_URL', mock_conn,
+                       self.updater.log).AndReturn([
+                           200, "",
+                           BytesIO(b"""usera:x:::::::
+userb:x:::::::
+""").getvalue()
+                       ])
+
+        self.mox.ReplayAll()
+        config = {}
+        source = httpsource.HttpFilesSource(config)
+        result = self.updater.GetUpdates(source, 'https://TEST_URL', 1)
+        print(result)
+        self.assertEqual(len(result), 2)
+
+    def testShadowGetUpdatesWithBz2Content(self):
+        mock_conn = self.mox.CreateMockAnything()
+        mock_conn.setopt(mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes()
+        mock_conn.getinfo(pycurl.INFO_FILETIME).AndReturn(-1)
+
+        self.mox.StubOutWithMock(pycurl, 'Curl')
+        pycurl.Curl().AndReturn(mock_conn)
+
+        self.mox.StubOutWithMock(curl, 'CurlFetch')
+
+        curl.CurlFetch(
+            'https://TEST_URL', mock_conn, self.updater.log
+        ).AndReturn([
+            200, "",
+            BytesIO(
+                base64.b64decode(
+                    "QlpoOTFBWSZTWfm+rXYAAAvJgAgQABAyABpAIAAhKm1GMoQAwRSpHIXejGQgz4u5IpwoSHzfVrsA"
+                )).getvalue()
+        ])
+
+        self.mox.ReplayAll()
+        config = {}
+        source = httpsource.HttpFilesSource(config)
+        result = self.updater.GetUpdates(source, 'https://TEST_URL', 1)
+        print(result)
+        self.assertEqual(len(result), 2)
+
+
 class TestGroupUpdateGetter(unittest.TestCase):
 
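The base64 literal in testShadowGetUpdatesWithBz2Content decodes to a bzip2-compressed shadow fixture. A fixture of that shape could be regenerated along these lines (the two records shown are an assumed example, not necessarily the exact payload encoded above):

    import base64
    import bz2

    # Two minimal shadow-format records, one per line.
    records = b"usera:x:::::::\nuserb:x:::::::\n"

    # bzip2-compress, then base64-encode so the fixture can live inline in the test.
    fixture = base64.b64encode(bz2.compress(records)).decode('ascii')
    print(fixture)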
13 changes: 7 additions & 6 deletions nss_cache/util/curl.py
@@ -19,7 +19,7 @@
 
 import logging
 import pycurl
-from io import StringIO
+from io import BytesIO
 
 from nss_cache import error
 
@@ -32,8 +32,8 @@ def CurlFetch(url, conn=None, logger=None):
         conn = pycurl.Curl()
 
     conn.setopt(pycurl.URL, url)
-    conn.body = StringIO()
-    conn.headers = StringIO()
+    conn.body = BytesIO()
+    conn.headers = BytesIO()
     conn.setopt(pycurl.WRITEFUNCTION, conn.body.write)
     conn.setopt(pycurl.HEADERFUNCTION, conn.headers.write)
     try:
@@ -42,7 +42,8 @@
         HandleCurlError(e, logger)
         raise error.Error(e)
     resp_code = conn.getinfo(pycurl.RESPONSE_CODE)
-    return (resp_code, conn.headers.getvalue(), conn.body.getvalue())
+    return (resp_code, conn.headers.getvalue().decode('utf-8'),
+            conn.body.getvalue())
 
 
 def HandleCurlError(e, logger=None):
@@ -63,8 +64,8 @@ def HandleCurlError(e, logger=None):
     if not logger:
         logger = logging
 
-    code = e[0]
-    msg = e[1]
+    code = e.args[0]
+    msg = e.args[1]
 
     # Config errors
     if code in (pycurl.E_UNSUPPORTED_PROTOCOL, pycurl.E_URL_MALFORMAT,
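The StringIO to BytesIO switch reflects that pycurl hands bytes to the WRITEFUNCTION and HEADERFUNCTION callbacks under Python 3; only the headers are decoded before CurlFetch returns. A minimal sketch of the same pattern outside nsscache (illustrative only; error handling omitted):

    from io import BytesIO

    import pycurl

    def fetch(url):
        conn = pycurl.Curl()
        body = BytesIO()
        headers = BytesIO()
        conn.setopt(pycurl.URL, url)
        conn.setopt(pycurl.WRITEFUNCTION, body.write)
        conn.setopt(pycurl.HEADERFUNCTION, headers.write)
        conn.perform()
        resp_code = conn.getinfo(pycurl.RESPONSE_CODE)
        conn.close()
        # Headers are decoded to str; the body stays bytes so callers can
        # decompress/decode it themselves, mirroring CurlFetch's return value.
        return resp_code, headers.getvalue().decode('utf-8'), body.getvalue()

    # Example: code, hdrs, body = fetch('https://example.com/passwd')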
36 changes: 35 additions & 1 deletion nsscache.conf.5
@@ -52,7 +52,7 @@ A complete list of configuration options follows.
 Specifies the source to use to retrieve NSS data from.
 
 Valid Options:
-.I ldap, s3
+.I ldap, s3, http
 
 .TP
 .B cache
@@ -295,6 +295,40 @@ array of records in json format. E.g.
 Valid attributes:
 .I "sshPublicKey"
 
+.SH http SOURCE OPTIONS
+
+These options configure the behaviour of the
+.I http
+source.
+
+.TP
+.B http_passwd_url
+URL for an HTTP endpoint that returns a file containing
+.B passwd
+records in the standard format. E.g.
+.I root:*:0:0:System Administrator:/var/root:/bin/sh
+
+.TP
+.B http_group_url
+URL for an HTTP endpoint that returns a file containing
+.B group
+records in the standard format. E.g.
+.I users:x:100:memberships....
+
+.TP
+.B http_shadow_url
+URL for an HTTP endpoint that returns a file containing
+.B shadow
+records in the standard format. E.g.
+.I root:*:18866:0:99999:7:::
+
+.TP
+.B http_sshkey_url
+URL for an HTTP endpoint that returns a file containing
+.B sshkey
+records in the standard format. E.g.
+.I root:ssh-rsa ...
+
 .SH nssdb CACHE OPTIONS
 
 These options configure the behaviour of the
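Putting the new options together, a hypothetical nsscache.conf fragment for the http source might look like the following (the URLs and the [DEFAULT] layout are illustrative assumptions, not taken from this PR; see the rest of nsscache.conf(5) for cache settings):

    [DEFAULT]
    source = http
    cache = files
    http_passwd_url = https://nss.example.com/passwd
    http_group_url = https://nss.example.com/group
    http_shadow_url = https://nss.example.com/shadow
    http_sshkey_url = https://nss.example.com/sshkey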