From 1f63013026d535d7daee6c73ac5aca99c6c88ad5 Mon Sep 17 00:00:00 2001
From: Prasanth
Date: Sat, 27 Mar 2021 19:38:51 +0530
Subject: [PATCH] remove unused

---
 .github/workflows/docker-publish.yml |  60 -------
 BUILD.md                             |   0
 Dockerfile                           |  25 ---
 Dockerfile.windows                   |  13 --
 NEXT_STEPS.md                        |  44 -----
 README.md                            |   2 +-
 TASKS.md                             |  56 ------
 dothttp-cli-one.spec                 |  33 ----
 dothttp-cli.py                       |   3 -
 dothttp-cli.spec                     |  36 ----
 pyoxidizer.bzl                       | 256 ---------------------
 setup.py                             |   3 +-
 12 files changed, 3 insertions(+), 528 deletions(-)
 delete mode 100644 .github/workflows/docker-publish.yml
 delete mode 100644 BUILD.md
 delete mode 100644 Dockerfile
 delete mode 100644 Dockerfile.windows
 delete mode 100644 NEXT_STEPS.md
 delete mode 100644 TASKS.md
 delete mode 100644 dothttp-cli-one.spec
 delete mode 100644 dothttp-cli.py
 delete mode 100644 dothttp-cli.spec
 delete mode 100644 pyoxidizer.bzl

diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
deleted file mode 100644
index c476294..0000000
--- a/.github/workflows/docker-publish.yml
+++ /dev/null
@@ -1,60 +0,0 @@
-name: Docker
-
-on:
-  push:
-    branches:
-      - master
-
-    # Publish `v1.2.3` tags as releases.
-    tags:
-      - v*
-env:
-  IMAGE_NAME: cedric05/dothttp
-  IMAGE_NAME_REL: cedric05/dothttp-rel
-
-jobs:
-  push:
-    runs-on: ubuntu-latest
-    if: github.event_name == 'push'
-
-    steps:
-      # using github image, so had to login before hand
-      - uses: actions/checkout@v2
-      - name: Log into registry
-        run: echo ${{ secrets.CR_PAT }} | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin
-
-      - name: Build image
-        run: |
-          docker build --target builder . --file Dockerfile --tag $IMAGE_NAME
-          docker build --file Dockerfile --tag $IMAGE_NAME_REL .
-          docker run --rm --entrypoint cat $IMAGE_NAME_REL /app/dothttp-cli.zip > dothttp-cli.zip
-      - name: Push image
-        run: |
-          IMAGE_ID=ghcr.io/$IMAGE_NAME
-
-          # Change all uppercase to lowercase
-          IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
-
-          # Strip git ref prefix from version
-          VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
-
-          # Strip "v" prefix from tag name
-          [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
-
-          # Use Docker `latest` tag convention
-          [ "$VERSION" == "master" ] && VERSION=latest
-
-          echo IMAGE_ID=$IMAGE_ID
-          echo VERSION=$VERSION
-
-          docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
-          docker push $IMAGE_ID:$VERSION
-      - name: Upload binaries to release
-        uses: svenstaro/upload-release-action@v2
-        with:
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
-          asset_name: dothttp_0.0.4_amd64.zip
-          file: dothttp-cli.zip
-          tag: ${{ github.ref }}
-          overwrite: true
-          file_glob: true
diff --git a/BUILD.md b/BUILD.md
deleted file mode 100644
index e69de29..0000000
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 95dbf90..0000000
--- a/Dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-FROM ghcr.io/cedric05/python3.9:main as builder
-
-LABEL maintainer="kesavarapu.siva@gmail.com"
-
-# whalebrew recommendations, won't harm any mainstream flow
-LABEL io.whalebrew.config.networks '["host"]'
-LABEL io.whalebrew.config.environment '["PYTHONPATH=/app"]'
-LABEL io.whalebrew.config.working_dir '/work'
-
-# install pipenv first
-RUN pip install pipenv
-WORKDIR /app
-# install dependencies
-COPY Pipfile.lock Pipfile /app/
-RUN pipenv install --system --deploy
-# as a final step, add source
-ADD . /app
-# entrypoint or command
-# ENTRYPOINT [ "bash" ]
-ENTRYPOINT ["python", "-m", "dothttp"]
-
-FROM builder
-RUN pipenv install --dev --system
-RUN apt update && apt install zip
-RUN pyinstaller dothttp-cli.spec && cd dist/ && zip -r ../dothttp-cli.zip dothttp-cli/ && cd .. && rm -rf dist build
diff --git a/Dockerfile.windows b/Dockerfile.windows
deleted file mode 100644
index 9284884..0000000
--- a/Dockerfile.windows
+++ /dev/null
@@ -1,13 +0,0 @@
-FROM python:3.9.1-windowsservercore-1809 as builder
-LABEL maintainer="kesavarapu.siva@gmail.com"
-RUN pip install pipenv
-WORKDIR C:\\dothttp
-COPY Pipfile.lock Pipfile C:\\dothttp\\
-RUN pipenv install --system --deploy
-ADD . .
-RUN pipenv install --dev --system
-ENTRYPOINT ["python", "-m", "dothttp"]
-
-FROM builder
-RUN pyinstaller .\dothttp-cli.py --add-data 'dothttp\http.tx;.'
-RUN Compress-Archive -DestinationPath dothttp-cli-windows.zip -Path dist\dothttp-cli
\ No newline at end of file
diff --git a/NEXT_STEPS.md b/NEXT_STEPS.md
deleted file mode 100644
index 2f60a3d..0000000
--- a/NEXT_STEPS.md
+++ /dev/null
@@ -1,44 +0,0 @@
-For better compatibility with vscode, it needs separate communication channels
-
-1. For sending information about which file, and options
-2. actual response (for terminal based )
-3. for text editors, it will want to show highlights for which it will need to take a peek of headers. sending headers
-   for terminal could be bad.
-
-options for communication
-
-1) http protocol
-2) shell based protocol
-
-request
-
-```json
-{
-  "requestType": "runFile",
-  "uid": "",
-  "file": "<>",
-  "env": [
-    "",
-    "",
-    ""
-  ],
-  "props": {
-    "": "",
-    "": ""
-  }
-}
-```
-
-response types needed
-
-```json
-{
-  "uid": "",
-  "error": "property not found, server not running ...., timeout exceeded",
-  "response": "",
-  "headers": {
-    "content-type": "application/json"
-  },
-  "statusCode": 200
-}
-```
diff --git a/README.md b/README.md
index 3962733..0279b9c 100644
--- a/README.md
+++ b/README.md
@@ -85,7 +85,7 @@ data({
 ### From pypi
 
 ```shell
-pip install dothttp-req==0.0.7
+pip install dothttp-req==0.0.10
 ```
 
 ### From source
diff --git a/TASKS.md b/TASKS.md
deleted file mode 100644
index 32abde5..0000000
--- a/TASKS.md
+++ /dev/null
@@ -1,56 +0,0 @@
-- [x] variable substitution
-- [x] properties file
-- [x] property from command line substitution
-- [x] support various data types like json, text (headers should be added automatically)
-- [x] tests
-- [x] file uploads
-- [x] handle 200, 300, 400, 500 exception (user readable messages)
-- [x] should handle exceptions, only message should be shown.
-- [x] debug mode to show logs
-- [x] info logs, (just headers and response)
-- [x] multi form upload
-- [X] command line arguments to curl and to make http request
-- [x] basic auth
-- [x] cookie storage and support
-- [x] update README.md to explain syntax and examples to refer
-- [x] easy installation package using pyinstaller, docker, brew
-    - [x] pyinstaller
-    - [x] docker
-    - [x] whalebrew
-- [x] default headers defined in prop file
-- [x] property value in http file itself
-- [x] property syntax with double paranthesis
-- [x] comments (using #, so shebang support)
-- [x] payload 's data should support json -> form input
-- [x] release, packages to github
-- [ ] support binary output
-- [ ] language server, formatter
-    - [x] HttpFileFormatter
-    - [ ] current implementation removes comments from file. add them back
-    - [x] [vscode plugin](https://github.com/cedric05/dothttp-code.git)
-        - [x] code highlights
-        - [ ] suggestions
-        - [x] run in vscode(itself)
-- [ ] 50% integration tests
-- [x] add tests with target and multi hosts
-    - [x] add tests for curl (more scenarios)
-    - [x] target negative tests
-- [x] 80% coverage unit tests
-- [x] Better message when connection refused issues
-- [x] standardise dependency versions
-- [x] curl print multiline
-- [x] ~~use sub commands for running server~~ (dotextensions if future could be moved to different repository, so don't
-  have to)
-- [ ] syntax issues better error reporting
-    - [ ] if it failed to parse certain text, show examples on how to fix it
-- [x] ~~history ?~~ (like curl/httpie it will not remember history) (support will be provided by vscode-extension)
-- [x] ~~multiple logging options ?~~
-- [x] ~~output coloring (according to content-type) ?~~
-- [x] ~~output should have rolling output format ?~~
-- [ ] curl import
-- [ ] swagger to http file generator
-- [x] postman import
-
-# BUGS
-
-- [x] ~~list query parameters are not working ?ram=ranga&ram=rajesh~~
diff --git a/dothttp-cli-one.spec b/dothttp-cli-one.spec
deleted file mode 100644
index f41382f..0000000
--- a/dothttp-cli-one.spec
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- mode: python ; coding: utf-8 -*-
-
-block_cipher = None
-
-
-a = Analysis(['dothttp-cli.py'],
-             pathex=['.'],
-             binaries=[],
-             datas=[('dothttp/http.tx', '.')],
-             hiddenimports=[],
-             hookspath=[],
-             runtime_hooks=[],
-             excludes=[],
-             win_no_prefer_redirects=False,
-             win_private_assemblies=False,
-             cipher=block_cipher,
-             noarchive=False)
-pyz = PYZ(a.pure, a.zipped_data,
-          cipher=block_cipher)
-exe = EXE(pyz,
-          a.scripts,
-          a.binaries,
-          a.zipfiles,
-          a.datas,
-          [],
-          name='dothttp-cli',
-          debug=False,
-          bootloader_ignore_signals=False,
-          strip=False,
-          upx=True,
-          upx_exclude=[],
-          runtime_tmpdir=None,
-          console=True )
diff --git a/dothttp-cli.py b/dothttp-cli.py
deleted file mode 100644
index 0cfb12e..0000000
--- a/dothttp-cli.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from dothttp.__main__ import main
-
-main()
diff --git a/dothttp-cli.spec b/dothttp-cli.spec
deleted file mode 100644
index 60bd768..0000000
--- a/dothttp-cli.spec
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- mode: python ; coding: utf-8 -*-
-
-block_cipher = None
-
-a = Analysis(['dothttp-cli.py'],
-             pathex=['.'],
-             binaries=[],
-             datas=[('dothttp/http.tx', '.')],
-             hiddenimports=[],
-             hookspath=[],
-             runtime_hooks=[],
-             excludes=[],
-             win_no_prefer_redirects=False,
-             win_private_assemblies=False,
-             cipher=block_cipher,
-             noarchive=False)
-pyz = PYZ(a.pure, a.zipped_data,
-          cipher=block_cipher)
-exe = EXE(pyz,
-          a.scripts,
-          [],
-          exclude_binaries=True,
-          name='dothttp',
-          debug=False,
-          bootloader_ignore_signals=False,
-          strip=False,
-          upx=True,
-          console=True)
-coll = COLLECT(exe,
-               a.binaries,
-               a.zipfiles,
-               a.datas,
-               strip=False,
-               upx=True,
-               upx_exclude=[],
-               name='dothttp-cli')
diff --git a/pyoxidizer.bzl b/pyoxidizer.bzl
deleted file mode 100644
index a2a6df0..0000000
--- a/pyoxidizer.bzl
+++ /dev/null
@@ -1,256 +0,0 @@
-# This file defines how PyOxidizer application building and packaging is
-# performed. See the pyoxidizer crate's documentation for extensive
-# documentation on this file format.
-
-# Obtain the default PythonDistribution for our build target. We link
-# this distribution into our produced executable and extract the Python
-# standard library from it.
-def make_dist():
-    return default_python_distribution()
-
-# Configuration files consist of functions which define build "targets."
-# This function creates a Python executable and installs it in a destination
-# directory.
-def make_exe(dist):
-    # This function creates a `PythonPackagingPolicy` instance, which
-    # influences how executables are built and how resources are added to
-    # the executable. You can customize the default behavior by assigning
-    # to attributes and calling functions.
-    policy = dist.make_python_packaging_policy()
-
-    # Enable support for non-classified "file" resources to be added to
-    # resource collections.
-    # policy.allow_files = True
-
-    # Control support for loading Python extensions and other shared libraries
-    # from memory. This is only supported on Windows and is ignored on other
-    # platforms.
-    # policy.allow_in_memory_shared_library_loading = True
-
-    # Control whether to generate Python bytecode at various optimization
-    # levels. The default optimization level used by Python is 0.
-    # policy.bytecode_optimize_level_zero = True
-    # policy.bytecode_optimize_level_one = True
-    # policy.bytecode_optimize_level_two = True
-
-    # Package all available Python extensions in the distribution.
-    # policy.extension_module_filter = "all"
-
-    # Package the minimum set of Python extensions in the distribution needed
-    # to run a Python interpreter. Various functionality from the Python
-    # standard library won't work with this setting! But it can be used to
-    # reduce the size of generated executables by omitting unused extensions.
-    # policy.extension_module_filter = "minimal"
-
-    # Package Python extensions in the distribution not having additional
-    # library dependencies. This will exclude working support for SSL,
-    # compression formats, and other functionality.
-    # policy.extension_module_filter = "no-libraries"
-
-    # Package Python extensions in the distribution not having a dependency on
-    # GPL licensed software.
-    # policy.extension_module_filter = "no-gpl"
-
-    # Controls whether the file scanner attempts to classify files and emit
-    # resource-specific values.
-    # policy.file_scanner_classify_files = True
-
-    # Controls whether `File` instances are emitted by the file scanner.
-    # policy.file_scanner_emit_files = False
-
-    # Controls the `add_include` attribute of "classified" resources
-    # (`PythonModuleSource`, `PythonPackageResource`, etc).
-    # policy.include_classified_resources = True
-
-    # Toggle whether Python module source code for modules in the Python
-    # distribution's standard library are included.
-    # policy.include_distribution_sources = False
-
-    # Toggle whether Python package resource files for the Python standard
-    # library are included.
-    # policy.include_distribution_resources = False
-
-    # Controls the `add_include` attribute of `File` resources.
-    # policy.include_file_resources = False
-
-    # Controls the `add_include` attribute of `PythonModuleSource` not in
-    # the standard library.
-    # policy.include_non_distribution_sources = True
-
-    # Toggle whether files associated with tests are included.
-    # policy.include_test = False
-
-    # Resources are loaded from "in-memory" or "filesystem-relative" paths.
-    # The locations to attempt to add resources to are defined by the
-    # `resources_location` and `resources_location_fallback` attributes.
-    # The former is the first/primary location to try and the latter is
-    # an optional fallback.
-
-    # Use in-memory location for adding resources by default.
-    policy.resources_location = "in-memory"
-
-    # Use filesystem-relative location for adding resources by default.
-    # policy.resources_location = "filesystem-relative:prefix"
-
-    # Attempt to add resources relative to the built binary when
-    # `resources_location` fails.
-    # policy.resources_location_fallback = "filesystem-relative:prefix"
-
-    # Clear out a fallback resource location.
-    # policy.resources_location_fallback = None
-
-    # Define a preferred Python extension module variant in the Python distribution
-    # to use.
-    # policy.set_preferred_extension_module_variant("foo", "bar")
-
-    # Configure policy values to classify files as typed resources.
-    # (This is the default.)
-    # policy.set_resource_handling_mode("classify")
-
-    # Configure policy values to handle files as files and not attempt
-    # to classify files as specific types.
-    # policy.set_resource_handling_mode("files")
-
-    # This variable defines the configuration of the embedded Python
-    # interpreter. By default, the interpreter will run a Python REPL
-    # using settings that are appropriate for an "isolated" run-time
-    # environment.
-    #
-    # The configuration of the embedded Python interpreter can be modified
-    # by setting attributes on the instance. Some of these are
-    # documented below.
-    python_config = dist.make_python_interpreter_config()
-
-    # Make the embedded interpreter behave like a `python` process.
-    # python_config.config_profile = "python"
-
-    # Set initial value for `sys.path`. If the string `$ORIGIN` exists in
-    # a value, it will be expanded to the directory of the built executable.
-    # python_config.module_search_paths = ["$ORIGIN/lib"]
-
-    # Use jemalloc as Python's memory allocator
-    # python_config.raw_allocator = "jemalloc"
-
-    # Use the system allocator as Python's memory allocator.
-    # python_config.raw_allocator = "system"
-
-    # Control whether `oxidized_importer` is the first importer on
-    # `sys.meta_path`.
-    # python_config.oxidized_importer = False
-
-    # Enable the standard path-based importer which attempts to load
-    # modules from the filesystem.
-    # python_config.filesystem_importer = True
-
-    # Set `sys.frozen = True`
-    # python_config.sys_frozen = True
-
-    # Set `sys.meipass`
-    # python_config.sys_meipass = True
-
-    # Write files containing loaded modules to the directory specified
-    # by the given environment variable.
-    # python_config.write_modules_directory_env = "/tmp/oxidized/loaded_modules"
-
-    # Evaluate a string as Python code when the interpreter starts.
-    # python_config.run_command = ""
-
-    # Run a Python module as __main__ when the interpreter starts.
-    # python_config.run_module = ""
-
-    # Run a Python file when the interpreter starts.
-    # python_config.run_filename = "/path/to/file"
-
-    # Produce a PythonExecutable from a Python distribution, embedded
-    # resources, and other options. The returned object represents the
-    # standalone executable that will be built.
-    exe = dist.to_python_executable(
-        name="dothttp",
-
-        # If no argument passed, the default `PythonPackagingPolicy` for the
-        # distribution is used.
-        packaging_policy=policy,
-
-        # If no argument passed, the default `PythonInterpreterConfig` is used.
-        config=python_config,
-    )
-
-    # Install tcl/tk support files to a specified directory so the `tkinter` Python
-    # module works.
-    # exe.tcl_files_path = "lib"
-
-    # Make the executable a console application on Windows.
-    # exe.windows_subsystem = "console"
-
-    # Make the executable a non-console application on Windows.
-    # exe.windows_subsystem = "windows"
-
-    # Invoke `pip download` to install a single package using wheel archives
-    # obtained via `pip download`. `pip_download()` returns objects representing
-    # collected files inside Python wheels. `add_python_resources()` adds these
-    # objects to the binary, with a load location as defined by the packaging
-    # policy's resource location attributes.
-    #exe.add_python_resources(exe.pip_download(["pyflakes==2.2.0"]))
-
-    # Invoke `pip install` with our Python distribution to install a single package.
-    # `pip_install()` returns objects representing installed files.
-    # `add_python_resources()` adds these objects to the binary, with a load
-    # location as defined by the packaging policy's resource location
-    # attributes.
-    # exe.add_python_resources(exe.pip_install(["dothttp"]))
-
-    # Invoke `pip install` using a requirements file and add the collected resources
-    # to our binary.
-    exe.add_python_resources(exe.pip_install(["-r", "requirements.txt"]))
-
-
-
-    # Read Python files from a local directory and add them to our embedded
-    # context, taking just the resources belonging to the `foo` and `bar`
-    # Python packages.
-    # exe.add_python_resources(exe.read_package_root(
-    #     path="/home/prasanth/cedric05/dothttp",
-    #     packages=['dothttp'],
-    # ))
-
-    # Discover Python files from a virtualenv and add them to our embedded
-    # context.
-    #exe.add_python_resources(exe.read_virtualenv(path="/path/to/venv"))
-
-    # Filter all resources collected so far through a filter of names
-    # in a file.
-    #exe.filter_from_files(files=["/path/to/filter-file"]))
-
-    # Return our `PythonExecutable` instance so it can be built and
-    # referenced by other consumers of this target.
-    return exe
-
-def make_embedded_resources(exe):
-    return exe.to_embedded_resources()
-
-def make_install(exe):
-    # Create an object that represents our installed application file layout.
-    files = FileManifest()
-
-    # Add the generated executable to our install layout in the root directory.
-    files.add_python_resource(".", exe)
-
-    return files
-
-# Tell PyOxidizer about the build targets defined above.
-register_target("dist", make_dist)
-register_target("exe", make_exe, depends=["dist"])
-register_target("resources", make_embedded_resources, depends=["exe"], default_build_script=True)
-register_target("install", make_install, depends=["exe"], default=True)
-
-# Resolve whatever targets the invoker of this configuration file is requesting
-# be resolved.
-resolve_targets()
-
-# END OF COMMON USER-ADJUSTED SETTINGS.
-#
-# Everything below this is typically managed by PyOxidizer and doesn't need
-# to be updated by people.
-
-PYOXIDIZER_VERSION = "0.10.3"
-PYOXIDIZER_COMMIT = "UNKNOWN"
diff --git a/setup.py b/setup.py
index ce2718c..88929d3 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ def requirements():
 
 setup(
     name="dothttp_req",
-    version="0.0.10.dev",
+    version="0.0.10",
    author="prasanth",
     author_email="kesavarapu.siva@gmail.com",
     description=("DotHttp recommended tool for making http requests."),
@@ -34,6 +34,7 @@ def requirements():
     options={"bdist_wheel": {"universal": False}},
     packages=find_packages(),
     install_requires=requirements(),
+    extras_require= {},
     long_description=read('README.md'),
     long_description_content_type=('text/markdown'),
     classifiers=[