
Commit

chore: testing workflow
salman2013 committed Apr 18, 2024
1 parent 01862d2 commit 3afa062
Showing 2 changed files with 80 additions and 31 deletions.
21 changes: 20 additions & 1 deletion .github/workflows/check_dependencies.yml
@@ -6,7 +6,6 @@ on:
defaults:
  run:
    shell: bash # strict bash
    working-directory: ./scripts

jobs:
  check_dependencies:
@@ -21,6 +20,26 @@ jobs:
        with:
          python-version: '3.8'

      - name: Create repo_work directory
        run: |
          WORK_DIR="/tmp/unpack_reqs"
          repo_name="${{ github.repository }}"
          repo_work="$WORK_DIR/$repo_name"
          mkdir -p "$repo_work"
          echo "$repo_work"
      - name: Copy Python requirements file
        run: |
          for req_file in "requirements/edx/base.txt" "requirements/base.txt" "requirements.txt"; do
            if [ -f "$req_file" ]; then
              cp "$req_file" /tmp/unpack_reqs/openedx/edx-platform/base.txt
              echo "Python requirements file found: $req_file"
              echo "Content of base.txt:"
              cat "$req_file"
              break
            fi
          done
      - name: Install requests module
        run: pip install requests
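
For reference, the two new steps create a per-repository work directory under /tmp/unpack_reqs and then copy the first requirements file that exists into it as base.txt (the destination in the copy step is currently hard-coded to the openedx/edx-platform path). Below is a minimal local sketch of the same lookup in Python; the function name, parameters, and example paths are illustrative and not part of the workflow:

    # Editor's sketch (illustrative, not part of the commit): mirror the
    # "copy the first existing requirements file" behaviour of the workflow step.
    import shutil
    from pathlib import Path

    def copy_first_requirements_file(repo_root, dest_dir):
        """Copy the first requirements file that exists into dest_dir as base.txt."""
        candidates = ["requirements/edx/base.txt", "requirements/base.txt", "requirements.txt"]
        dest_dir = Path(dest_dir)
        dest_dir.mkdir(parents=True, exist_ok=True)
        for rel in candidates:
            req_file = Path(repo_root) / rel
            if req_file.is_file():
                target = dest_dir / "base.txt"
                shutil.copyfile(req_file, target)
                print(f"Python requirements file found: {req_file}")
                return target
        return None

    # Example (paths illustrative):
    # copy_first_requirements_file(".", "/tmp/unpack_reqs/openedx/edx-platform")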

90 changes: 60 additions & 30 deletions scripts/find_dependencies.py
@@ -305,6 +305,32 @@ def request_package_info_url(package):
    else:
        print(f"Failed to retrieve data for package {package}. Status code:", response.status_code)

def find_file_in_project(filename):
    """
    Recursively searches for a file within the project directory.
    Args:
    - filename: The name of the file to search for.
    Returns:
    - A list of file paths where the file was found.
    """
    # Get the current working directory
    project_root = os.getcwd()

    # List to store paths of found files
    found_paths = []

    # Recursively search for the file in the project directory
    for root, dirs, files in os.walk(project_root):
        if filename in files:
            # Construct the full path of the found file
            file_path = os.path.join(root, filename)
            found_paths.append(file_path)

    return found_paths
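# Editor's note (not part of the commit): find_file_in_project() above is a new
# helper and does not appear to be called elsewhere in this diff. A usage sketch,
# with "base.txt" as an illustrative filename:
#     for path in find_file_in_project("base.txt"):
#         print(path)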



def process_directory():
    """
@@ -313,37 +339,37 @@ def process_directory():
    Also copies the considered dependencies file into the temp work directory,
    for later analysis.
    """
    repo_name = Path.cwd().name
    repo_work = WORK_DIR / repo_name
    repo_work.mkdir(parents=True, exist_ok=True)
    # repo_name = Path.cwd().name
    # repo_work = WORK_DIR / repo_name
    # repo_work.mkdir(parents=True, exist_ok=True)
    repo_urls = set()
    package_names = []
    openedx_packages = []
    if (js_reqs := Path("package-lock.json")).exists():
        shutil.copyfile(js_reqs, repo_work / "package-lock.json")
    # if (js_reqs := Path("package-lock.json")).exists():
    # shutil.copyfile(js_reqs, repo_work / "package-lock.json")
    # with change_dir(repo_work):
    # repo_urls.update(check_js_dependencies())
    if (py_reqs := find_py_reqs()):
        shutil.copyfile(py_reqs, repo_work / "base.txt")

        with open(repo_work / "base.txt") as fbase:
            # Read each line (package name) in the file
            file_data = fbase.read()

            # Splitting the data by lines
            lines = file_data.strip().split('\n')
            for line in lines:
                # Print the package name
                parts = line.split('#', 1)
                package_name = parts[0].strip()
                package_names.append(package_name)

        for package in package_names:
            if package != " ":
                home_page = request_package_info_url(package)
                if home_page is not None:
                    if match := urls_in_orgs([home_page], SECOND_PARTY_ORGS):
                        openedx_packages.append(home_page)
    # if (py_reqs := find_py_reqs()):
    # shutil.copyfile(py_reqs, repo_work / "base.txt")

    with open(repo_work / "base.txt") as fbase:
        # Read each line (package name) in the file
        file_data = fbase.read()

        # Splitting the data by lines
        lines = file_data.strip().split('\n')
        for line in lines:
            # Print the package name
            parts = line.split('#', 1)
            package_name = parts[0].strip()
            package_names.append(package_name)

    for package in package_names:
        if package != " ":
            home_page = request_package_info_url(package)
            if home_page is not None:
                if match := urls_in_orgs([home_page], SECOND_PARTY_ORGS):
                    openedx_packages.append(home_page)

    return openedx_packages
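
A note on the parsing above: each requirement line is split on the first '#' and stripped, so version pins stay attached to the package name, while a comment-only or blank line reduces to an empty string that the later comparison against a single space (" ") does not filter out. A small illustration with invented requirement lines:

    # Editor's illustration (not part of the commit); the sample lines are invented.
    sample_lines = [
        "requests==2.31.0",
        "django==4.2    # via -r requirements/edx/base.in",
        "    # a comment-only line",
    ]
    for line in sample_lines:
        package_name = line.split('#', 1)[0].strip()
        print(repr(package_name))
    # Prints 'requests==2.31.0', 'django==4.2', and '' for the comment-only line.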

@@ -380,16 +406,20 @@ def main(dirs=None, org=None):
    Analyze the requirements in all of the directories mentioned on the command line.
    If arguments have newlines, treat each line as a separate directory.
    """
    import pdb; pdb.set_trace()
    if dirs is None:
        repo_dir = sys.argv[1]
        org_flag_index = sys.argv.index("--org")
        org = sys.argv[org_flag_index + 1]
    print(f"Creating new work directory: {WORK_DIR}")
    shutil.rmtree(WORK_DIR, ignore_errors=True)
    #print(f"Creating new work directory: {WORK_DIR}")
    #shutil.rmtree(WORK_DIR, ignore_errors=True)
    repo_urls = set()

    with change_dir(repo_dir):
        repo_urls.update(process_directory())
    #with change_dir(repo_dir):
    repo_urls.update(process_directory())




    print("== DONE ==============")
    print("Second-party:")
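
Based on the argument handling shown in main(), the script would be invoked as something like 'python find_dependencies.py <repo_dir> --org <org>' when no directory list is passed in. A minimal sketch of how those arguments are resolved; the simulated argv values are illustrative only:

    # Editor's sketch (not part of the commit): how main() resolves its inputs
    # when dirs is None. The argv values below are invented for illustration.
    import sys

    sys.argv = ["find_dependencies.py", "/tmp/checkout/edx-platform", "--org", "openedx"]

    repo_dir = sys.argv[1]                    # "/tmp/checkout/edx-platform"
    org_flag_index = sys.argv.index("--org")  # index of the "--org" flag: 2
    org = sys.argv[org_flag_index + 1]        # "openedx"
    print(repo_dir, org)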
