Developing Software with Generative AI¶

In [ ]:
from dotenv import load_dotenv
In [ ]:
load_dotenv()
Out[ ]:
True

Writing code with LLMs¶

Small local model¶

StarChat code-generation model

In [ ]:
from langchain.llms.huggingface_hub import HuggingFaceHub
llm = HuggingFaceHub(
    task="text-generation",
    repo_id="HuggingFaceH4/starchat-beta",
    model_kwargs={
        "temperature": 0.5,
        "max_length": 4000,
    }
)
print(llm("Write a function that calculates prime numbers until a certain number."))
/home/shpark/anaconda3/lib/python3.11/site-packages/huggingface_hub/utils/_deprecation.py:131: FutureWarning: 'InferenceApi' (from 'huggingface_hub.inference_api') is deprecated and will be removed from version '1.0'. `InferenceApi` client is deprecated in favor of the more feature-complete `InferenceClient`. Check out this guide to learn how to convert your script to use it: https://huggingface.co/docs/huggingface_hub/guides/inference#legacy-inferenceapi-client.
  warnings.warn(warning_message, FutureWarning)

Here's a function that calculates all prime numbers up to a given number:
```python
def prime_numbers(n):
    # Generate a list of all numbers from 2 to n
    numbers = list(range(2, n+1))
    
    # Loop over the numbers and remove all multiples of numbers that are not prime
    for i in range(2, int(n**0.5)+1):
        if i**2 > n:
           
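
For reference, a hand-written completion of the function the model started (a simple sieve of Eratosthenes; this is not model output):

In [ ]:
def prime_numbers(n):
    """Return all prime numbers up to and including n (sieve of Eratosthenes)."""
    if n < 2:
        return []
    is_prime = [True] * (n + 1)
    is_prime[0] = is_prime[1] = False
    for i in range(2, int(n**0.5) + 1):
        if is_prime[i]:
            # mark every multiple of i as composite
            for j in range(i * i, n + 1, i):
                is_prime[j] = False
    return [i for i, prime in enumerate(is_prime) if prime]

print(prime_numbers(20))  # [2, 3, 5, 7, 11, 13, 17, 19]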

Salesforce code-generation model

In [ ]:
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
checkpoint ="Salesforce/codegen-350M-mono"
model = AutoModelForCausalLM.from_pretrained(checkpoint)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
pipe = pipeline(task="text-generation", 
                model=model, 
                tokenizer=tokenizer, 
                max_new_tokens=500
)
text ="""
def calculate_primes(n):
\"\"\"Create a list of consecutive integers from 2 up to N.
For example:
>>> calculate_primes(20)
Output: [2, 3, 5, 7, 11, 13, 17, 19]
\"\"\"
"""
completion = pipe(text)
print(completion[0]["generated_text"])
Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.
def calculate_primes(n):
"""Create a list of consecutive integers from 2 up to N.
For example:
>>> calculate_primes(20)
Output: [2, 3, 5, 7, 11, 13, 17, 19]
"""
    primes = []
    
    primes = [2]
    if n%2 == 0 :
        return([2])
    else :
        while True:
            remul_factor= n//primes[-1]
            primes.append(primes[-1]*remul_factor)
            x= primes[-1]-primes[-2]
            if x<0:
                return(primes)
            elif x==0:
                primes.append(primes[-1])
            elif x==primes[-1]:
                primes.append(x//primes[-1])

if __name__ == "__main__":
    import doctest
    if doctest.testmod().failed == 0:
        print("\n✨ ALL TESTS PASSED. ✨")

In [ ]:
from langchain.llms.huggingface_pipeline import HuggingFacePipeline
llm = HuggingFacePipeline(pipeline=pipe)
In [ ]:
code_txt=llm(text)
Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.
In [ ]:
print(code_txt)
    prime = [2, 3, 5, 7, 11, 13, 17, 19]

    for x in range(2, n):
        i = 2
        while i * i <= x:
            if x % i == 0:
                prime.insert(0, i)
                break
            i += 1

    return prime


# If/else statements

def sum_primes(n):
    """sum of all numbers that are a valid prime number up to and
    including n. This code uses an optimized binary search.
    For example: (2, 19) would sum up 0 to 2, 19. This code
    is more efficient than brute force
    """
    for x in range(n):
        num = 2
        while num * num <= n:
            if n % num == 0:
                n /= num
                break
            num += 1

    return n


def get_prime_generator():
    """Function that prints a list of primes to your
    computer. Returns an integer of 2, 3, 5, etc."""


def fib(num):
    """Print the series given n from 100 to 0.
    Input:
    num = an integer greater than or equal to 0
    Output:
    Fibonacci series up to num
    """


def is_prime(n):
    """is a number prime? Returns a boolean."""


def print_prime():
    """Simple print statement.
    Input:
    no input required.
    Output:
    Prime numbers.
    """

Automating software development¶

LangChain ships with several integrations for code execution, such as LLMMathChain, which runs Python code to solve math problems, and LLMBashChain, which runs Bash terminal commands and can help with system administration tasks. These tools are useful for solving individual problems, but they do not cover the larger software development process.

Still, for isolated coding problems this kind of approach can work quite well, as shown below.
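
A minimal sketch of the LLMMathChain integration mentioned above, assuming the same legacy langchain imports used throughout this notebook (LLMMathChain also requires the numexpr package to be installed):

In [ ]:
from langchain.chains import LLMMathChain
from langchain.chat_models import ChatOpenAI

# LLMMathChain has the LLM translate the question into a numexpr expression
# and then evaluates that expression locally.
math_llm = ChatOpenAI(model="gpt-4-1106-preview", temperature=0)
llm_math = LLMMathChain.from_llm(llm=math_llm, verbose=True)
print(llm_math.run("What is 13 raised to the 0.3432 power?"))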

In [ ]:
# ref : https://rfriend.tistory.com/835
from langchain.chat_models import ChatOpenAI
from langchain_experimental.utilities import PythonREPL
from langchain.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
template = """
Write some python code to solve the user's problem.
Include print(result) at the end for user to check the result.
Return only python code without the markdown formatting and nothing else.
"""

prompt = ChatPromptTemplate.from_messages(
    [("system",template),("human","{question}")])

model = ChatOpenAI(model="gpt-4-1106-preview",temperature=0)

# python codes generation
python_code_chain = prompt | model | StrOutputParser()

# python REPL (Read-Eval-Print-Loop)
python_code_run_chain = python_code_chain | PythonREPL().run
In [ ]:
# python code generation
print(python_code_chain.invoke({"question":"Write a function 'calculate_primes(limit)' that calculates prime numbers"}))
def calculate_primes(limit):
    primes = []
    for num in range(2, limit + 1):
        is_prime = True
        for i in range(2, int(num ** 0.5) + 1):
            if num % i == 0:
                is_prime = False
                break
        if is_prime:
            primes.append(num)
    return primes

# Example usage:
result = calculate_primes(100)
print(result)
In [ ]:
print(
    python_code_run_chain.invoke(
        {"question":"Write a function 'calculate_primes(limit)' that calculates prime numbers. And calculate_primes(20)."}
    )
)
[2, 3, 5, 7, 11, 13, 17, 19]

In [ ]:
from langchain.chat_models import ChatOpenAI
from langchain.agents import AgentType
from langchain_experimental.agents.agent_toolkits import create_python_agent
from langchain_experimental.tools import PythonREPLTool


llm = ChatOpenAI(model="gpt-4-1106-preview",temperature=0)

python_agent = create_python_agent(
    llm=llm,
    tool=PythonREPLTool(),
    agent_type=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True)
In [ ]:
python_agent.run("What are the prime numbers until 20?")

> Entering new AgentExecutor chain...
To find the prime numbers up to 20, I can write a Python function that checks each number from 2 to 20 to see if it is prime. A number is considered prime if it is greater than 1 and has no divisors other than 1 and itself. I will execute this function in the Python REPL.

Action: Python_REPL
Action Input: 
def is_prime(n):
    if n <= 1:
        return False
    for i in range(2, int(n**0.5) + 1):
        if n % i == 0:
            return False
    return True

prime_numbers = [x for x in range(2, 21) if is_prime(x)]
print(prime_numbers)
Observation: [2, 3, 5, 7, 11, 13, 17, 19]

Thought:I now know the final answer

Final Answer: The prime numbers until 20 are [2, 3, 5, 7, 11, 13, 17, 19].

> Finished chain.
Out[ ]:
'The prime numbers until 20 are [2, 3, 5, 7, 11, 13, 17, 19].'

MetaGPT

  • Introduction: https://www.unite.ai/ko/metagpt-complete-guide-to-the-best-ai-agent-available-right-now/

  • Git: https://github.com/geekan/metagpt

  • Requires npm and Node.js

    • https://www.freecodecamp.org/korean/news/how-to-update-node-and-npm-to-the-latest-version/
  • Similar frameworks

    • Auto-GPT
    • GPT-Engineer
      • Reference: https://insight.infograb.net/blog/2023/06/27/gpt-engineer/
    • SuperAGI
    • BabyAGI

Personally, I find this remarkable.
All I entered was "Write a web crawler server using python flask that gets just the text for any web URL I enter.", and as the images below show, it generates the project and handles the coding, documentation, and even the design.

[Screenshots: the project files and documents MetaGPT generated for the prompt above.]

In [ ]:
import asyncio
import nest_asyncio
from metagpt.team import Team
from metagpt.roles import ProjectManager, ProductManager, Architect, Engineer

async def startup(idea: str, investment: float=2.0, n_round: int=5, implement: bool=False,code_review: bool=False, test: bool=False):
    team = Team()
    team.hire([ProductManager(), Architect(), ProjectManager()])
    if implement or code_review :
        team.hire([Engineer(n_borg=5,use_code_review=code_review)])
    team.invest(investment)
    team.run_project(idea)
    await team.run(n_round=n_round)
2024-01-05 23:44:44.784 | INFO     | metagpt.const:get_metagpt_package_root:32 - Package root set to /home/shpark/python_dev/openai/langchain
2024-01-05 23:44:44.949 | INFO     | metagpt.config:get_default_llm_provider_enum:116 - LLMProviderEnum.OPENAI Model: gpt-4-1106-preview
2024-01-05 23:44:44.950 | INFO     | metagpt.config:get_default_llm_provider_enum:118 - API: LLMProviderEnum.OPENAI
In [ ]:
# Allow asyncio to be used inside a Jupyter notebook
nest_asyncio.apply()
asyncio.run(
startup("Write a web crawler server using python flask that gets just the text for any web URL I enter.",
        implement=True,
        code_review=True,
        )
)

LangChain Plan-and-Execute

In [ ]:
from langchain.chat_models import ChatOpenAI
from langchain_experimental.plan_and_execute import load_chat_planner,load_agent_executor,PlanAndExecute
from langchain_experimental.agents.agent_toolkits import create_python_agent
In [ ]:
llm = ChatOpenAI(model="gpt-4-1106-preview",temperature=0, cache=False)

planner = load_chat_planner(llm=llm)
executor = load_agent_executor(llm=llm,
                               tools=[PythonREPLTool()],
                               verbose=True)
In [ ]:
agent_executor = PlanAndExecute(planner=planner,
                                executor=executor,
                                verbose=True,
                                handle_parsing_errors = True,
                                return_intermediate_steps=True
                                )
In [ ]:
# plan_and_execute is still rough around the edges.
# Running the code below may raise errors.

# result = agent_executor.run("""Write a web crawler python console program that gets just the text for any web URL.
#                             Don't add "```" markdown to JSON strings generated in the middle.
#                             Return only python code without the markdown formatting and nothing else.
#                             url= https://namu.wiki/w/%EC%8B%A0%ED%98%9C%EC%84%A0""")
In [ ]:
from langchain.chat_models import ChatOpenAI
from langchain.agents import AgentType
from langchain_experimental.agents.agent_toolkits import create_python_agent
from langchain_experimental.tools import PythonREPLTool


llm = ChatOpenAI(model="gpt-4-1106-preview",temperature=0, cache=False)

python_agent = create_python_agent(
    llm=llm,
    tool=PythonREPLTool(),
    agent_type=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True)

python_agent.run("""Write a web crawler python console program that gets just the text for any web URL.
                            Return only python code without the markdown formatting and nothing else.""")

> Entering new AgentExecutor chain...
I will write a simple Python program that uses the `requests` and `BeautifulSoup` libraries to fetch the text content of a given web URL. The program will make an HTTP GET request to the URL, parse the HTML content, and extract the text.

Action: Python_REPL
Action Input: 
```python
import requests
from bs4 import BeautifulSoup

def get_text_from_url(url):
    response = requests.get(url)
    html = response.content
    soup = BeautifulSoup(html, 'html.parser')
    return soup.get_text()

# Example usage:
# text = get_text_from_url('http://example.com')
# print(text)
```
Observation: 
Thought:I now know the final answer

Final Answer:
```python
import requests
from bs4 import BeautifulSoup

def get_text_from_url(url):
    response = requests.get(url)
    html = response.content
    soup = BeautifulSoup(html, 'html.parser')
    return soup.get_text()

# Example usage:
# text = get_text_from_url('http://example.com')
# print(text)
```

> Finished chain.
Out[ ]:
"```python\nimport requests\nfrom bs4 import BeautifulSoup\n\ndef get_text_from_url(url):\n    response = requests.get(url)\n    html = response.content\n    soup = BeautifulSoup(html, 'html.parser')\n    return soup.get_text()\n\n# Example usage:\n# text = get_text_from_url('http://example.com')\n# print(text)\n```"
In [ ]:
from langchain.prompts import  PromptTemplate
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
In [ ]:
DEV_PROMPT_TEMPLATE = """You are a software engineer who writes Python code given tasks or objectives.

Please use PEP8 syntax and comments!

No additional explanations or comments are needed; return just the executable plain Python code.

Come up with a Python code for this task:

{task}
"""

software_prompt = PromptTemplate.from_template(DEV_PROMPT_TEMPLATE)
software_llm = LLMChain(
    llm=ChatOpenAI(model="gpt-4-1106-preview",temperature=0, max_tokens=4000),
    prompt=software_prompt,
    verbose=True
)
In [ ]:
task = """Write a web crawler python console program that gets just the text for any web URL.
Return only python code without the markdown formatting and nothing else."""

result = software_llm.invoke({"task":task})

> Entering new LLMChain chain...
Prompt after formatting:
You are a software engineer who writes Python code given tasks or objectives.

Come up with a Python code for this task:

"Write a web crawler python console program that gets just the text for any web URL.
Return only python code without the markdown formatting and nothing else.

Please use PEP8 syntax and comments!

> Finished chain.
In [ ]:
print(result['text'])
Certainly! Below is a Python console program that uses the `requests` and `BeautifulSoup` libraries to crawl a web page and extract just the text from it. Please ensure you have these libraries installed before running the code (`pip install requests beautifulsoup4`).

```python
import requests
from bs4 import BeautifulSoup

def get_text_from_url(url):
    """
    This function takes a URL as input and returns the text content of the web page.
    
    :param url: A string containing the URL of the web page to crawl.
    :return: A string containing the text of the web page.
    """
    try:
        # Send a GET request to the URL
        response = requests.get(url)
        # Check if the request was successful
        if response.status_code == 200:
            # Parse the content of the page using BeautifulSoup
            soup = BeautifulSoup(response.content, 'html.parser')
            # Extract text from the parsed HTML
            page_text = soup.get_text(separator='\n', strip=True)
            return page_text
        else:
            print(f"Error: Unable to fetch the page. Status code: {response.status_code}")
            return None
    except requests.RequestException as e:
        print(f"Error: An exception occurred while fetching the page. {e}")
        return None

# Example usage:
if __name__ == "__main__":
    # Replace 'http://example.com' with the URL you want to crawl
    url_to_crawl = 'http://example.com'
    text = get_text_from_url(url_to_crawl)
    if text:
        print(text)
```

This code defines a function `get_text_from_url` that takes a URL, makes a GET request to that URL, and then uses BeautifulSoup to parse the HTML content and extract the text. The `if __name__ == "__main__":` block is used to demonstrate how to call the function with an example URL. Replace `'http://example.com'` with the URL you wish to crawl.

Custom Python code builder¶

PythonDeveloper is implemented in python_developer.py under the langchain_ai_06 folder.
Generated code is saved to main.py in the dev folder under langchain_ai_06 and executed there.
An audit.log file is created in the dev folder and records the intermediate steps.
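
The actual class lives in langchain_ai_06/python_developer.py; purely as an illustrative sketch (the file paths, logging, and subprocess handling here are assumptions, not the book's implementation), the interface used below looks roughly like this:

In [ ]:
# Illustrative sketch only -- see langchain_ai_06/python_developer.py for the real class.
import logging
import subprocess
from pathlib import Path


class PythonDeveloperSketch:
    def __init__(self, llm_chain, path="langchain_ai_06/dev"):
        self.llm_chain = llm_chain
        self.path = Path(path)
        self.path.mkdir(parents=True, exist_ok=True)
        # intermediate steps are appended to dev/audit.log
        logging.basicConfig(filename=self.path / "audit.log", level=logging.INFO)

    def write_code(self, task: str) -> str:
        """Ask the LLM chain for plain Python code solving the task."""
        logging.info("Task:\n%s", task)
        code = self.llm_chain.run(task=task)
        logging.info("Code:\n%s", code)
        return code

    def write_file(self, filename: str, code: str) -> Path:
        """Save the generated code under the dev folder."""
        file_path = self.path / filename
        file_path.write_text(code)
        return file_path

    def execute_code(self, code: str, filename: str) -> str:
        """Run the saved file in a subprocess and return its output."""
        file_path = self.write_file(filename, code)
        completed = subprocess.run(
            ["python", str(file_path)], capture_output=True, text=True
        )
        return completed.stdout or completed.stderr

    def run(self, task: str) -> str:
        """Generate, save, and execute code for the task in one call."""
        code = self.write_code(task)
        output = self.execute_code(code, "main.py")
        return f"The code returned the following:\n{output}"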

In [ ]:
import sys, os
from dotenv import load_dotenv
load_dotenv()

from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.tools import Tool
from langchain.chains import LLMChain

# Add the project root path to the Python path
current_path = os.path.abspath(".")
print(current_path)
if current_path not in sys.path:
    sys.path.append(current_path)

from langchain_ai_06.python_developer import  PythonDeveloper
/home/shpark/python_dev/openai/langchain
In [ ]:
DEV_PROMPT_TEMPLATE = """You are a software engineer who writes Python code given tasks or objectives.

Use PEP8 syntax and comments!

The result must contain no additional explanations, no backticks, and no markdown formatting.
Return just the executable plain Python code.
Come up with a Python code for this task:

{task}
"""

software_prompt = PromptTemplate.from_template(DEV_PROMPT_TEMPLATE)
software_llm = LLMChain(
    llm = ChatOpenAI(model="gpt-4-1106-preview",temperature=0, cache=False, verbose=True),
    prompt = software_prompt,
)
software_dev = PythonDeveloper(llm_chain=software_llm)

Code generation only

In [ ]:
code = software_dev.write_code("Write a web crawler python console program that gets just the text for any web URL.")
print(code)
INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
import requests
from bs4 import BeautifulSoup

def get_text_from_url(url):
    # Send a GET request to the specified URL
    response = requests.get(url)
    
    # Check if the request was successful
    if response.status_code == 200:
        # Parse the content of the page using BeautifulSoup
        soup = BeautifulSoup(response.content, 'html.parser')
        
        # Extract all text from the parsed HTML
        page_text = soup.get_text(separator=' ', strip=True)
        
        return page_text
    else:
        # Return an error message if the request was unsuccessful
        return f"Error: Unable to retrieve content from {url}. Status code: {response.status_code}"

# Example usage:
if __name__ == "__main__":
    url_to_crawl = input("Enter the URL to crawl for text: ")
    text = get_text_from_url(url_to_crawl)
    print(text)
In [ ]:
# Save the generated code
software_dev.write_file("web_crawler.py",code)
Out[ ]:
PosixPath('/home/shpark/python_dev/openai/langchain/langchain_ai_06/dev/web_crawler.py')
In [ ]:
# Run the code
# test URL: https://example.com/
result = software_dev.execute_code(code, "web_crawler.py")
print(result)
Example Domain Example Domain This domain is for use in illustrative examples in documents. You may use this
    domain in literature without prior coordination or asking for permission. More information...

In [ ]:
# Generate Tetris game code
task="""Write a basic tetris game in Python with no syntax errors, properly closed strings, brackets,
parentheses, quotes, commas, colons, semicolons, and braces, no other potential syntax errors,
and including the necessary imports for the game.
"""

code = software_dev.write_code(task)
print(code)
software_dev.write_file("tetris.py",code)
INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
import pygame
import random

# Define the colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
BLUE = (0, 0, 255)
CYAN = (0, 255, 255)
MAGENTA = (255, 0, 255)
ORANGE = (255, 165, 0)
YELLOW = (255, 255, 0)

# Define the shapes of the single parts
tetris_shapes = [
    [[1, 1, 1],
     [0, 1, 0]],
    
    [[0, 2, 2],
     [2, 2, 0]],
    
    [[3, 3, 0],
     [0, 3, 3]],
    
    [[4, 0, 0],
     [4, 4, 4]],
    
    [[0, 0, 5],
     [5, 5, 5]],
    
    [[6, 6, 6, 6]],
    
    [[7, 7],
     [7, 7]]
]

colors = [BLACK, GREEN, RED, BLUE, CYAN, MAGENTA, ORANGE, YELLOW]

# Define the size of the grid
grid_height = 20
grid_width = 10

# Define the size of the window
win_height = 600
win_width = 300

block_size = win_height // grid_height

# Initialize Pygame
pygame.init()

# Set the height and width of the screen
size = [win_width, win_height]
screen = pygame.display.set_mode(size)

# Set the title of the window
pygame.display.set_caption("Tetris")

# Loop until the user clicks the close button
done = False
clock = pygame.time.Clock()
fps = 25
game_over = False
current_piece = None
next_piece = tetris_shapes[random.randint(0, len(tetris_shapes) - 1)]
grid = [[0 for _ in range(grid_width)] for _ in range(grid_height)]

def check_collision(grid, shape, offset):
    off_x, off_y = offset
    for cy, row in enumerate(shape):
        for cx, cell in enumerate(row):
            try:
                if cell and grid[cy + off_y][cx + off_x]:
                    return True
            except IndexError:
                return True
    return False

def remove_row(grid, row):
    del grid[row]
    return [[0 for _ in range(grid_width)]] + grid

def join_matrixes(mat1, mat2, mat2_off):
    off_x, off_y = mat2_off
    for cy, row in enumerate(mat2):
        for cx, val in enumerate(row):
            if val:
                mat1[cy + off_y - 1][cx + off_x] = val
    return mat1

def new_board():
    board = [[0 for x in range(grid_width)] for y in range(grid_height)]
    board += [[1 for x in range(grid_width)]]
    return board

def rotate_clockwise(shape):
    return [[shape[y][x] for y in range(len(shape))] for x in range(len(shape[0]) - 1, -1, -1)]

def rotate_counter_clockwise(shape):
    return [[shape[y][x] for y in range(len(shape) - 1, -1, -1)] for x in range(len(shape[0]))]

def drop_piece():
    global grid, current_piece, next_piece
    if not current_piece:
        current_piece = next_piece
        next_piece = tetris_shapes[random.randint(0, len(tetris_shapes) - 1)]
    else:
        for i in range(1, grid_height):
            if check_collision(grid, current_piece, (0, i)):
                grid = join_matrixes(grid, current_piece, (0, i - 1))
                current_piece = None
                break

def clear_rows():
    global grid
    full_rows = [i for i, row in enumerate(grid[:-1]) if 0 not in row]
    for i in full_rows:
        grid = remove_row(grid, i)

def draw_matrix(matrix, offset):
    off_x, off_y = offset
    for y, row in enumerate(matrix):
        for x, val in enumerate(row):
            if val:
                pygame.draw.rect(screen,
                                 colors[val],
                                 pygame.Rect((off_x + x) * block_size,
                                             (off_y + y) * block_size,
                                             block_size,
                                             block_size), 0)

while not done:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            done = True
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_UP:
                current_piece = rotate_clockwise(current_piece)
            if event.key == pygame.K_DOWN:
                current_piece = rotate_counter_clockwise(current_piece)
            if event.key == pygame.K_LEFT:
                if not check_collision(grid, current_piece, (-1, 0)):
                    current_piece = rotate_clockwise(current_piece)
            if event.key == pygame.K_RIGHT:
                if not check_collision(grid, current_piece, (1, 0)):
                    current_piece = rotate_counter_clockwise(current_piece)

    screen.fill(BLACK)

    if not game_over:
        drop_piece()
        clear_rows()
        draw_matrix(grid, (0, 0))
        if current_piece:
            draw_matrix(current_piece, (0, 0))
    else:
        # Game over screen
        font = pygame.font.Font(None, 36)
        text = font.render("Game Over", True, WHITE)
        text_rect = text.get_rect(center=(win_width // 2, win_height // 2))
        screen.blit(text, text_rect)

    pygame.display.flip()
    clock.tick(fps)

pygame.quit()
Out[ ]:
PosixPath('/home/shpark/python_dev/openai/langchain/langchain_ai_06/dev/tetris.py')

Using a LangChain Tool to generate code and run it immediately

In [ ]:
code_tool = Tool.from_function(
    func=software_dev.run,
    name = "PythonCodeGenerator", # name to give the tool's func
    description="You are a software engineer who writes Python code given tasks or objectives."
)
code_tool.run("Write a web crawler python console program that gets just the text for any web URL.")
INFO:root:Task:
Write a web crawler python console program that gets just the text for any web URL.
INFO:httpx:HTTP Request: POST https://api.openai.com/v1/chat/completions "HTTP/1.1 200 OK"
INFO:root:Code:
```python
import requests
from bs4 import BeautifulSoup

def get_text_from_url(url):
    # Send a GET request to the URL
    response = requests.get(url)
    
    # Check if the request was successful
    if response.status_code == 200:
        # Parse the HTML content of the page using BeautifulSoup
        soup = BeautifulSoup(response.content, 'html.parser')
        
        # Extract all text from the parsed HTML
        text = soup.get_text(separator='\n', strip=True)
        
        return text
    else:
        # If the request was not successful, return an error message
        return f"Error: Unable to access page, status code {response.status_code}"

# Example usage:
if __name__ == "__main__":
    url = input("Enter a URL to extract text from: ")
    print(get_text_from_url(url))
```
Example Domain
Example Domain
This domain is for use in illustrative examples in documents. You may use this
    domain in literature without prior coordination or asking for permission.
More information...

Out[ ]:
'The code returned the following:\nExample Domain\nExample Domain\nThis domain is for use in illustrative examples in documents. You may use this\n    domain in literature without prior coordination or asking for permission.\nMore information...\n'