commit 28d101b2bec3e2d77b0439b127b08eb0ea9c6fbd
Author: Ashik K
Date:   Tue Feb 28 11:24:24 2023 +0100

    Version alpha0

diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..f546756
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,24 @@
+cmake_minimum_required(VERSION 3.14)
+project(testCurlPP LANGUAGES CXX)
+set(CMAKE_INCLUDE_CURRENT_DIR ON)
+set(CMAKE_CXX_STANDARD 11)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+include(FindPkgConfig)
+pkg_check_modules(CURLPP REQUIRED curlpp)
+
+add_executable(fetch_from_ghost
+  fetch_from_ghost.cc
+)
+
+add_executable(parse_posts
+  parse_posts.cc
+)
+
+add_executable(parse_pages
+  parse_pages.cc
+)
+
+target_link_libraries(fetch_from_ghost
+  ${CURLPP_LDFLAGS}
+  )
diff --git a/README b/README
new file mode 100644
index 0000000..80695a3
--- /dev/null
+++ b/README
@@ -0,0 +1,9 @@
+You need libcurlpp and rapidjson to build.
+To build, please do:
+
+mkdir build
+cd build
+cmake ..
+make
+
+
diff --git a/fetch_from_ghost.cc b/fetch_from_ghost.cc
new file mode 100644
index 0000000..e415e63
--- /dev/null
+++ b/fetch_from_ghost.cc
@@ -0,0 +1,60 @@
+
+#include <cstdio>
+#include <cstdlib>
+
+#include <fstream>
+#include <iostream>
+
+#include <curlpp/cURLpp.hpp>
+#include <curlpp/Easy.hpp>
+#include <curlpp/Options.hpp>
+#include <curlpp/Exception.hpp>
+
+#define DEBUG_LEVEL -1
+
+int main(int argc, char *argv[])
+{
+  if(argc != 3) {
+    std::cerr << argv[0] << ": Usage: apiurl contentkey" << std::endl;
+    return -1;
+  }
+
+  char url_posts[128], url_pages[128];
+
+  snprintf(url_posts, sizeof url_posts, "%s/ghost/api/content/posts/?key=%s&limit=all", argv[1], argv[2]);
+  snprintf(url_pages, sizeof url_pages, "%s/ghost/api/content/pages/?key=%s", argv[1], argv[2]);
+  if (DEBUG_LEVEL > 0) std::cout << "urls to fetch are " << url_posts << " and " << url_pages << std::endl;
+
+  // Download both endpoints and dump the raw JSON responses to disk
+  try {
+    // RAII initialisation and cleanup of libcurl
+    curlpp::Cleanup cleaner;
+    curlpp::Easy request1, request2;
+
+    request1.setOpt(new curlpp::options::Url(url_posts));
+    if (DEBUG_LEVEL > 0) std::cout << request1 << std::endl;
+
+    // Streaming a Url option performs the request and writes the response body
+    std::ofstream outfile;
+    outfile.open ("posts.json");
+    outfile << curlpp::options::Url(url_posts) << std::endl;
+    outfile.close();
+
+    request2.setOpt(new curlpp::options::Url(url_pages));
+    if (DEBUG_LEVEL > 0) std::cout << request2 << std::endl;
+
+    outfile.open ("pages.json");
+    outfile << curlpp::options::Url(url_pages) << std::endl;
+    outfile.close();
+
+    return EXIT_SUCCESS;
+  }
+  catch ( curlpp::LogicError & e ) {
+    if (DEBUG_LEVEL > 0) std::cout << e.what() << std::endl;
+  }
+  catch ( curlpp::RuntimeError & e ) {
+    if (DEBUG_LEVEL > 0) std::cout << e.what() << std::endl;
+  }
+
+  return EXIT_FAILURE;
+}
diff --git a/parse_pages.cc b/parse_pages.cc
new file mode 100644
index 0000000..581912c
--- /dev/null
+++ b/parse_pages.cc
@@ -0,0 +1,63 @@
+#include <cstdio>
+#include <cstdlib>
+
+#include <string>
+#include <iostream>
+#include <fstream>
+#include <rapidjson/document.h>
+#include <rapidjson/filereadstream.h>
+
+int main()
+{
+  // Open the file
+  FILE* fp = fopen("pages.json", "rb");
+  // Check if the file was opened successfully
+  if (!fp) {
+    std::cerr << "Error: unable to open file"
+              << std::endl;
+    return -1;
+  }
+
+  // Read the file
+  char readBuffer[65536];
+  rapidjson::FileReadStream is(fp, readBuffer, sizeof(readBuffer));
+
+  // Parse the JSON document
+  rapidjson::Document doc;
+  doc.ParseStream(is);
+
+  // Check if the document is valid
+  if (doc.HasParseError()) {
+    std::cerr << "Error: failed to parse JSON document"
+              << std::endl;
+    fclose(fp);
+    return 1;
+  }
+
+  // Close the file
+  fclose(fp);
+
+  if (doc.HasMember("pages")
+      && doc["pages"].IsArray()) {
+    const rapidjson::Value& pages = doc["pages"];
+    system("mkdir content");
+    for (rapidjson::SizeType i = 0; i < pages.Size();
+         i++) {
+      if (pages[i].IsObject()) {
+        auto post = pages[i].GetObject();
+        std::string postid = post["id"].GetString();
+        std::cout << postid << ": " << post["title"].GetString() << std::endl;
+
+        // Write the page body into its own HTML file under content/
+        // NOTE: the printed title and the output file name are reconstructed assumptions
+        std::ofstream post_file("content/" + postid + ".html");
+        post_file << "<html>";
+        post_file << post["html"].GetString();
+        post_file << "</html>";
+        post_file.close();
+      }
+    }
+
+    return 0;
+  }
+}
diff --git a/parse_posts.cc b/parse_posts.cc
new file mode 100644
index 0000000..3dbf6b5
--- /dev/null
+++ b/parse_posts.cc
@@ -0,0 +1,63 @@
+#include <cstdio>
+#include <cstdlib>
+
+#include <string>
+#include <iostream>
+#include <fstream>
+#include <rapidjson/document.h>
+#include <rapidjson/filereadstream.h>
+
+int main()
+{
+  // Open the file
+  FILE* fp = fopen("posts.json", "rb");
+  // Check if the file was opened successfully
+  if (!fp) {
+    std::cerr << "Error: unable to open file"
+              << std::endl;
+    return -1;
+  }
+
+  // Read the file
+  char readBuffer[65536];
+  rapidjson::FileReadStream is(fp, readBuffer, sizeof(readBuffer));
+
+  // Parse the JSON document
+  rapidjson::Document doc;
+  doc.ParseStream(is);
+
+  // Check if the document is valid
+  if (doc.HasParseError()) {
+    std::cerr << "Error: failed to parse JSON document"
+              << std::endl;
+    fclose(fp);
+    return 1;
+  }
+
+  // Close the file
+  fclose(fp);
+
+  if (doc.HasMember("posts")
+      && doc["posts"].IsArray()) {
+    const rapidjson::Value& posts = doc["posts"];
+    system("mkdir content");
+    for (rapidjson::SizeType i = 0; i < posts.Size();
+         i++) {
+      if (posts[i].IsObject()) {
+        auto post = posts[i].GetObject();
+        std::string postid = post["id"].GetString();
+        std::cout << postid << ": " << post["title"].GetString() << std::endl;
+
+        // Write the post body into its own HTML file under content/
+        // NOTE: the printed title and the output file name are reconstructed assumptions
+        std::ofstream post_file("content/" + postid + ".html");
+        post_file << "<html>";
+        post_file << post["html"].GetString();
+        post_file << "</html>";
+        post_file.close();
+      }
+    }
+
+    return 0;
+  }
+}
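
A possible invocation once everything is built, assuming the build/ layout from the README; the blog URL and the Content API key below are placeholders for a real Ghost instance:

cd build
./fetch_from_ghost https://blog.example.com <content-api-key>
./parse_posts
./parse_pages

fetch_from_ghost writes posts.json and pages.json into the current directory; parse_posts and parse_pages then read those files and write one HTML file per post or page under content/.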