0% found this document useful (0 votes)
22 views4 pages

Komiiii

The document scrapes job listings data from a jobs website and stores it in a Pandas dataframe. It extracts the job title, company and salary from each listing and adds it as a new row to the dataframe. It then prints the first 5 rows of the formatted dataframe.
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as DOCX, PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
22 views4 pages

Komiiii

The document scrapes job listings data from a jobs website and stores it in a Pandas dataframe. It extracts the job title, company and salary from each listing and adds it as a new row to the dataframe. It then prints the first 5 rows of the formatted dataframe.
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as DOCX, PDF, TXT or read online on Scribd
You are on page 1/ 4

1/16/24, 2:01 PM Untitled2.

ipynb - Colaboratory

# Cell 1: retrieve the JobStreet "teknologi informasi" listings page and
# dump the full parsed HTML document for inspection.
import requests
import pandas as pd
from bs4 import BeautifulSoup

# NOTE(review): the URL contains a stray space before "-jobs" — this looks
# like a PDF-extraction artifact; confirm against the live site.
url = "https://www.jobstreet.co.id/id/teknologi-informasi -jobs"
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
print(soup)

xt/css">
0;padding:0;background:#fff}
.a1msqiz body{color-scheme:dark;background:#1C2330}
"_6qlr80 lnocuo18 lnocuo1b"><meta content="true" data-automation="deeplink-metadata-preview" name="branch:deeplink:preview"><meta cont

web.aips-sol.com';
antry-c175cfe.30855';

(s[o]=function(){l.push(arguments)}).q=l)})(window,'sol');
orId":"366b1da2-50e2-404e-a7d1-4eb4a4668437","apiHost":"https://web.aips-sol.com"});

ion="sol-src" src="https://web.aips-sol.com/sol.min.js"></script>
ion="sol-wrapper-bundle" src="/static/ca-search-ui/bundle-6d5f5033.js"></script>
er-state">
_API_NUDGES_ENDPOINT":"https:\u002F\u002Fnudge-api.cloud.seek.com.au","SEEK_SAVED_SEARCHES_PATH":"\u002Fmy-activity\u002Fsaved-searche
appConfig":{"brand":"jobstreet","language":"id","zone":"asia-4","zoneFeatures":
{"QUICK_SEARCH_ENABLED":true,"QUICK_SEARCH_CLASSIFICATI zone":"asia-4","defaultLocale":"id-ID","availableLocales":["en-ID","id-
ID"],"timedBanners":{"privatePolicyBanner":{"privatePolicy":{"d
bstreet","isLoggedIn":false,"loginId":"NULL","siteCountry":"id","siteLanguage":"id","siteSection":"discover","zone":"asia-
4","experime
};

ED_CHUNK " type="application/json">[56,63,918,442,216,343]</script><script LOADABLE_REQUIRED_CHUNKS ext" type="application/j


S id="
main" src="/static/ca-search-ui/houston/runtime~main-96487afcbc3ad8e543dd.js"></script>
main" src="/static/ca-search-ui/houston/vendor.react-01c4135e736b21bdca75.js"></script>
main" src="/static/ca-search-ui/houston/vendor.reactUtils-6d08ef5eb5957eee19c3.js"></script>
main" src="/static/ca-search-ui/houston/vendor.seekUtils-773ceece978d7790229a.js"></script>
main" src="/static/ca-search-ui/houston/vendors-29f0ac2c904e6054d462.js"></script>
main" src="/static/ca-search-ui/houston/main-6d569fca6b70c5e35d6e.js"></script>
en-ID-translations" src="/static/ca-search-ui/houston/en-ID-translations-7bca5940c78fabe2a61c.js"></script>
id-ID-translations" src="/static/ca-search-ui/houston/id-ID-translations-2d7612b3fe47d4e12176.js"></script>
SearchResultPage" src="/static/ca-search-ui/houston/SearchResultPage-8e7f880576c02e90bbf0.js"></script>
avascript">
a.RaygunObject=e,a[e]=a[e]||function(){
guments)},f=b.createElement(c),g=b.getElementsByTagName(c)[0],
ode.insertBefore(f,g),h=a.onerror,a.onerror=function(b,c,d,f,g){
rror(b)),a[e].q=a[e].q||[],a[e].q.push({
ript","//cdn.raygun.io/raygun4js/raygun.min.js","rg4js");

/tags.tiqcdn.com/utag/seek/houston/prod/utag.js"></script>
r,a,n,c,h,_,s,d,k){if(!b[n]||!b[n]._q){for(;s<_.length;)c(h,_[s++]);d=r.createElement(a);d.async=1;d.src=" https://cdn.branch.io/branch

mber') return;

{(h.hj.q=h.hj.q||[]).push(arguments)};
jid,hjsv:5};
ame('head')[0];
ript');r.async=1;
.hjid+j+h._hjSettings.hjsv;

ps://static.hotjar.com/c/hotjar-','.js?sv=');

02,
9,
ost]);

# Cell 2: scrape job ads from JobStreet and collect position, company, and
# salary strings into three parallel lists (pre-seeded with placeholder
# rows), then print the lists.
import requests
import pandas as pd
from bs4 import BeautifulSoup

# NOTE(review): the stray space before "-jobs" looks like an extraction
# artifact in the URL — confirm against the live site.
th = "https://www.jobstreet.co.id/id/teknologi-informasi -jobs"
halaman = requests.get(th)
hasil = BeautifulSoup(halaman.content, 'html.parser')
# Each matched element is one job-ad card (class "single-job-ads").
lowkers = hasil.find_all(class_="single-job-ads")

# Placeholder rows shown ahead of any scraped data.
posisi = ['Karyawan', 'Gamer', 'Foto Grafer', 'Desain', 'Tukang Bersih']
instalasi = ['AphipPythen', 'CEO', 'Dyroth', 'BUMN', 'AMAZON']
gaji = ['200.000.000', '300.000.000', '400.000.000', '600.000.000', '700.000.000']

for p in lowkers:
    # Job title: the first <a> inside the card's first <h3>.
    t1 = p.select("h3")
    t2 = t1[0].select("a")
    posisi.append(t2[0].get_text())

    # Company name: the first <a> inside the card's first <p>;
    # fall back to "-" when the paragraph has no link.
    t1 = p.select("p")
    t2 = t1[0].select("a")
    try:
        # BUG FIX: the original appended to `instansi`, which is never
        # defined — the list is `instalasi` (see its definition and the
        # print below). Also narrowed the bare `except:` (which swallowed
        # that very NameError) to the lookup failure we expect.
        instalasi.append(t2[0].get_text())
    except IndexError:
        instalasi.append("-")

    # Salary: prefer the second <span> of the card's second <p>, else
    # fall back to the first <span>.
    t2 = t1[1].select("span")
    try:
        gaji.append(t2[1].get_text())
    except IndexError:
        gaji.append(t2[0].get_text())

print(posisi)
print(instalasi)
print(gaji)

 ['Karyawan', 'Gamer', 'Foto Grafer', 'Desain', 'Tukang Bersih']


['AphipPythen', 'CEO', 'Dyroth', 'BUMN', 'AMAZON']
['200.000.000', '300.000.000', '400.000.000', '600.000.000', '700.000.000']

# Cell 3: same scrape as cell 2, but assemble the three lists into a
# Pandas DataFrame with columns Posisi / Instansi / Gaji.
import requests
import pandas as pd
from bs4 import BeautifulSoup

# NOTE(review): the stray space before "-jobs" looks like an extraction
# artifact in the URL — confirm against the live site.
th = "https://www.jobstreet.co.id/id/teknologi-informasi -jobs"
halaman = requests.get(th)
hasil = BeautifulSoup(halaman.content, 'html.parser')
# Each matched element is one job-ad card (class "single-job-ads").
lowkers = hasil.find_all(class_="single-job-ads")

# Placeholder rows shown ahead of any scraped data.
posisi = ['Karyawan', 'Gamer', 'Foto Grafer', 'Desain', 'Tukang Bersih']
instalasi = ['AphipPythen', 'CEO', 'Dyroth', 'BUMN', 'AMAZON']
gaji = ['200.000.000', '300.000.000', '400.000.000', '600.000.000', '700.000.000']

for p in lowkers:
    # Job title: the first <a> inside the card's first <h3>.
    t1 = p.select("h3")
    t2 = t1[0].select("a")
    posisi.append(t2[0].get_text())

    # Company name: the first <a> inside the card's first <p>;
    # fall back to "-" when the paragraph has no link.
    t1 = p.select("p")
    t2 = t1[0].select("a")
    try:
        # BUG FIX: the original appended to `instansi`, which is never
        # defined — the list is `instalasi` (it is the one passed to the
        # DataFrame below). Narrowed the bare `except:` accordingly.
        instalasi.append(t2[0].get_text())
    except IndexError:
        instalasi.append("-")

    # Salary: prefer the second <span> of the card's second <p>, else
    # fall back to the first <span>.
    t2 = t1[1].select("span")
    try:
        gaji.append(t2[1].get_text())
    except IndexError:
        gaji.append(t2[0].get_text())

# Build the result table; the trailing bare expression displays it in Colab.
lowker = pd.DataFrame({
    "Posisi": posisi,
    "Instansi": instalasi,
    "Gaji": gaji,
})
lowker

https://colab.research.google.com/drive/1JwRl3h-1nuhTY_jGWroTgp3ineNkAgN9#scrollTo=v_T5Zw5b06ms&uniqifier=1&printMode=true 3
1/16/24, 2:01 PM Untitled2.ipynb - Colaboratory

1 to 5 of 5 entries Filter

index Posisi Instansi Gaji


0 Karyawan AphipPythen 200.000.000
1 Gamer CEO 300.000.000
2 Foto Grafer Dyroth 400.000.000
3 Desain BUMN 600.000.000
4 Tukang Bersih AMAZON 700.000.000
Show 25 per page

Like what you see? Visit the data table notebook to learn more about interactive tables.

Categorical distributions

2-d categorical distributions

Double-click (or enter) to edit

https://colab.research.google.com/drive/1JwRl3h-1nuhTY_jGWroTgp3ineNkAgN9#scrollTo=v_T5Zw5b06ms&uniqifier=1&printMode=true 4

You might also like