Saturday, November 20, 2021

Node js

NODE JS



/*
Node.js is a JavaScript runtime built on Chrome's V8 engine.
It runs JavaScript code on the backend (server side).
Node.js uses non-blocking I/O: if a line takes a long time (e.g. an I/O call), the CPU is not blocked — execution of the other lines continues.
npm - package manager for js .Similar to pip in python
Note: Never push node module dependencies to Git as it is huge
docs: the full Node.js API (all built-in modules and functions) is documented at
- https://nodejs.org/dist/latest-v17.x/docs/api/
- https://nodejs.dev/learn/introduction-to-nodejs
*/
//ubuntu Linux Install
sudo apt update
sudo apt install nodejs
node -v // --version
sudo apt install npm
//Run Node
node file.js //execute file
node //start nodejs shell
//package create and install
npm init //create a project using npm
npm install express --save
npm install -g package_name //install globally
5 //5
5+5 //10
console.log("hi") //print
console.log(__filename) //
//variable
var a=1 // NOTE(review): modern JS prefers const/let over var
const c =1
//Map
const map1 = new Map();
map1.set('a', 1);
console.log(map1.get("a")) // 1
//List
var a = [1, "two", "three", 4]; // redeclaring with var is allowed (let/const would throw)
if (a[0] == 1)
console.log("equal")
else
console.log("not")
//Loop
// `let` keeps the index block-scoped; the original `i = 0` created an implicit global
for (let i = 0; i <= a.length - 1; i++) console.log(i) // prints indices 0..3
a.forEach(i => console.log(i)) // prints the values themselves
//function
function f(name){console.log(name)} //f("hi")
//IMPORT OTHER FILE
//library.js
const d = { "name": "deepak", "age": 100 } // const: the original bare `d = {...}` created an implicit global
module.exports = { add, d } // works even though add is defined below: function declarations are hoisted
function add(x, y) { return x + y; }
//index.js
const lib = require("./library.js")
console.log(lib.add(4, 2), lib.d); // 6 { name: 'deepak', age: 100 }
//common js vs ESM
const library = require("library") //commonjs
// fixed syntax: namespace imports are written `import * as NAME from "module"`
import * as lib from "library" //ESM - only works when the project is a module ("type": "module" in package.json)
//IMPORT DEPENDENCY
// fixed: the module was stored in `dep` but used as `os`, which threw a ReferenceError
const os = require("os")
console.log(os.freemem()) // free system memory in bytes
//SYNC VS ASYN LIBRARIES
const fs = require("fs")
fs.writeFileSync("f.txt","data to be written")
fs.writeFile("f.txt","data to be written", () =>{console.log("done writing")}) // intimate user once data is written
//IMPORTANT LIBRARIES
path //folder path
fs //file
url
events
express //to create server alternate to "http" package
//EVENTS - listeners registered with .on() run synchronously when emit() fires
const event = require('events'); // EventEmitter base class
class E extends event {}
const e = new E();
// register a listener for the 'WaterFull' event
e.on('WaterFull', () => {
console.log('Please turn off the motor!');
// schedule a reminder 3s later; the timer does not block the main thread
setTimeout(() => {
console.log('Please turn off the motor! Its a gentle reminder');
}, 3000);
});
console.log("started")
e.emit('WaterFull'); //fires the event: the listener body runs synchronously here
console.log("The script is still running") // prints before the 3s reminder fires
// Result
started
Please turn off the motor!
The script is still running
Please turn off the motor! Its a gentle reminder
//EXPRESS - https://expressjs.com/en/starter/hello-world.html
const express = require('express')
const path = require('path')
const app = express()
const port = 3000
//http://localhost:3000/home/hi?id=5
app.get('/home/:param', (req, res) => {
// :param is a route parameter (req.params); id comes from the query string (req.query)
res.send('Hello World!' + req.params.param + req.query.id)
})
app.get('/file', (req, res) => {
// serve a static HTML file located next to this script
res.sendFile(path.join(__dirname, "file.html"))
})
app.post('/file', function(req, res) {
res.send(("Working"));
// console.log('req.body.name', req.body['submit']);
});
app.get('/json', (req, res) => {
// res.json sets Content-Type: application/json and serializes the object
res.json({ "name": "deepak" })
})
app.listen(port, () => {
console.log(`Example app listening at http://localhost:${port}`)
})
view raw node.js hosted with ❤ by GitHub

Friday, October 22, 2021

Scala Advanced

Scala Advanced

Generics

trait A // top of the hierarchy
class B extends A // B <: A
class C extends B // C <: B <: A
object LowerBoundGeneric extends App {
  // [A >: B] is a lower bound: the type parameter must be B or a supertype of B,
  // so Test[A] and Test[B] compile but Test[C] does not (C is a subtype of B).
  class Test[A >: B](val x: A) //Can have of type A and B not C
  // NOTE(review): the failure case is Test[C]; a C *value* would still be accepted
  // where a B is expected, since C <: B — confirm intent of the original comment
  val temp = new B() // new C() = Fail
  val test: Test[B] = new Test[B](temp)
}
object CovariantGeneric extends App {
  // +A makes Test2 covariant (Test2[C] <: Test2[B]); the method's own lower-bounded
  // parameter [B >: A] is what keeps accepting arguments sound under covariance.
  class Test2[+A]{ def run[B >: A](element: B)=print("working") }
  val temp2 =new C() // a C is accepted here: run takes any supertype of B, and C <: B
  new Test2[B]().run(temp2)
}

Apply

//whereby the compiler converts f(a) into f.apply(a)
object Applytest extends App{
  // defining apply lets instances be called like functions
  class Foo(x: Int) { def apply(y: Int) =x+y}
  val f = new Foo(3)
  println(f(4))  // prints 7: f(4) desugars to f.apply(4) = 3 + 4
}

Partial Function

/*
function  is f: X -> Y,
A partial function =  Does not force f to map every element of X to an element of Y
i.e., several sub-partial functions handle different elements of the same data set
new PartialFunction[input , output]
if "isDefined" is true than execute "apply"
orElse, andthen
 */
object Partialtest extends App{
  val sample = 1 to 5
  // literal syntax: the case guard doubles as isDefinedAt
  val isEven: PartialFunction[Int, String] = {
    case n if n != 0 && n % 2 == 0 => s"$n is even"
  }
  // explicit instance: isDefinedAt gates when apply may be called
  val isOdd = new PartialFunction[Int, String] {
    def apply(n: Int) = s"$n is odd"
    def isDefinedAt(n: Int) = n % 2 == 1
  }
  // for each element, try isEven first and fall back to isOdd
  val evenNumbers = sample.map(isEven orElse isOdd)
  print(evenNumbers)
}

Companion Object

/*
Companion object and its class can access each other’s private members (fields and methods)
Have same name
Same file
 */
object CompanionTest extends App{
  // companion object and class: same name, same file, mutual access to private members
  class Person {var name = ""}
  object Person {
    // factory method: Person("x") desugars to Person.apply("x")
    def apply(name: String): Person = {
      val p = new Person() // val (not var): the reference itself is never reassigned
      p.name = name
      p
    }
  }
  print(Person("Fred Flinstone").name) //Person.apply("Fred Flinstone").
}


Future

/*
Anything inside Future {}, is run in a different thread
Application’s main thread doesn’t stop for Future to Complete
Result of Future is always  Try types: Success or Failure
To make main thread wait scala.concurrent.Await.result(future,15.seconds) is used
isComplete , value ,map , collect
 */
object FutureTest extends App{
  import scala.concurrent.Future
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.util.{Failure, Success}
  // the block runs on a pool thread; the main thread continues immediately
  val f1:Future[Int] = Future { Thread.sleep(1000); 21 + 21 }
  // NOTE(review): busy-waiting is for demonstration only — prefer Await.result or onComplete
  while(f1.isCompleted!=true){println("future operation completed ?? -  "+f1.isCompleted)}
  println(f1.value) // Some(Success(42)) once completed
  val f2:Future[Int]=f1.map(i => i+1) // map chains a transformation onto the result
  f2.onComplete {
    case Success(value) => println(s"Got the callback, value = $value")
    case Failure(e) => e.printStackTrace
  }
}



Implicit

object ImplicitTest extends App{
  case class Person(name: String) {def greet = println(s"Hi, my name is $name")}
  // implicit conversion: the compiler rewrites "Peter".greet as fromStringToPerson("Peter").greet
  implicit def fromStringToPerson(name: String) = Person(name)
  "Peter".greet
}

Thursday, October 14, 2021

IBMCLOUD

 IBMCLOUD

Index:

  1. Basics
  2. Pre-Req
  3. Free CommandLine Tool
  4. Create Free Application
  5. API Keys
  6. Getting oAuth Tokens
    1. Standalone
    2. Ibm CLI tool
  7. Create AI application
  8. Cloudant Database
    1. Fetch the Clouddant Document from API
  9. Functions
  10. API GateWay
  11. Simple ETL from COS to DB2
  12. Copy ETL using REST
  13. Run Spark Job on COS 

Basics

  • IAM = Shared Account
  • Provisioning= Create an App
  • Helm Charts = Add Addons to the Provisioned App
  • There are 3 Types of App
    • Classic Infrastructure  - For Individuals
    • IAM Managed Services - For Enterprise / Resource Groups 
    • Cloud Foundary - Open Source 

Pre-Req

  • open ibmcloud
  • create a free account
  • Login as directed

Free CommandLine with python3.8+

  • Login to ibmcloud
  • On the tool bar of Landing Page, Click on IBMCloud Shell
  • $python3

Create Free Application

  • Login to ibmcloud
  • click on Catalog
  • Search for CloudFoundary
  • Click on Cloud Foundary Application >Click on Create
  • Add details : Resource ,App Name etc., 
  • Click on Create 
  • Goto homepage > Resource List > CloudFoundaryApp > Click on the app 
  • Click on link Visit app URL

API Keys

Getting oAuth Tokens 


1) Standalone installer (https://cloud.ibm.com/docs/cli?topic=cli-getting-started)

  • Run $curl -fsSL https://clis.cloud.ibm.com/install/linux | sh #Linux
  • ibmcloud login #ibmcloud login --sso
  • ibmcloud iam oauth-tokens
  • copy the result
  • SET IAM_TOKEN=<paste here>
  • Use "Authorization: Bearer IAM_TOKEN"

2) IBMCLOUD CLI

  • Log in to IBM Cloud 
  • select Manage > Security > Platform API Keys.
  • Create an API key for your own personal identity, 
  • copy the value
  • Run below
    $curl -X POST 'https://iam.cloud.ibm.com/identity/token' \
    -H 'Content-Type: application/x-www-form-urlencoded' \
    -d 'grant_type=urn:ibm:params:oauth:grant-type:apikey&apikey=<MY_APIKEY>'/

Response :
 {
        "access_token": "eyJraWQiOiIyMDExxxxxxxxxxx
  • copy access token and use as below
  • Syntax-
    • Authorization: Bearer <access_token_value_here>. 
  • example-
    • Authorization: Bearer eyJraWQiOiIyMDE3MDgwOS0wMDoxxxxxxxxx

Create a AI Application - Language Translator

  • Login to ibmcloud
  • goto to Catalog
  • filter :Pricing plan=lite 
  • Category : Select AI /MAchine Learning
  • Click on Language Translator 
  • Create
  • Check on consent on Agreement 
  • Create
  • Copy the api-key . url  under : Language Translator >Service Credentials
  • Replace api-key and url  (More REST calls : Language Translator >GettingStarted)
curl -X POST --user "apikey:{apikey}" \
  --header "Content-Type: text/plain" \
  --data "Language Translator translates text from one language to another" \
  "{url}/v3/identify?version=2018-05-01"
  • open Ibmcloud Shell from the ibmcloud tool bar
  • Run the new Command

Cloudant Database 

  • Login to IBMCloud
  • Goto Catalog
  • Select and Create a Cloudant Instance
  • Open the Cloudant Instance provisioned from Resource List > Services and Software >Cloudant
  • Click on Manage > Launch Dashboard
  • Create Database > test >Click on Create
  • Open test DB > Design Document > New Doc > add new json key value 
eg:
{
  "_id": "ce9575de70477c932e222bf5b6bd7fea",
  "name": "deepak"
}
  • Click on Create Document

Lets fetch this document from API 

  • Under Cloudant page > Service Credentails > Create New Role > Manager >Add
  • Open the New Service Credentails Created , Note down apikey , url
  • Open ibmcli  from ibmcloud tool bar (https://cloud.ibm.com/docs/account?topic=account-iamtoken_from_apikey&interface=api)
  • $curl -X POST 'https://iam.cloud.ibm.com/identity/token' -H 'Content-Type: application/x-www-form-urlencoded' -d 'grant_type=urn:ibm:params:oauth:grant-type:apikey&apikey=<MY_APIKEY>'
  • Copy the Token generated
  • Run below commands
API_BEARER_TOKEN=<paste token here>
curl -H "Authorization: Bearer $API_BEARER_TOKEN" -X GET "{url}/test/{_id from cloudant}"

Other Api:

curl -H "Authorization: Bearer $API_BEARER_TOKEN" -X PUT /{db}" #Create DB
curl -H "Authorization: Bearer $API_BEARER_TOKEN" -X PUT /{db}/{doc_id}" Create Document
curl -H "Authorization: Bearer $API_BEARER_TOKEN" -X GET "{url}/test/{_id from cloudant}" #Read Document

Ref : 

https://cloud.ibm.com/docs/account?topic=account-iamtoken_from_apikey&interface=api
https://cloud.ibm.com/docs/Cloudant
https://cloud.ibm.com/apidocs/cloudant#getdocument

Functions

  • Login to IBMCloud
  • catalog > search and click Functions
  • Click on StartCreating
  • Select QuickStart templates > Hello World
  • select python3 > clk Deploy
Note:
TO modify the python code: Function/Actions/helloworld

Test1:

  • click Invoke:Result - {"greeting": "Hello stranger!"}
  • click Invoke with parameters:{"name":"deepak"}
  • click Invoke :Result- {"greeting": "Hello deepak!"}

Test2

  • Open ibmCloud Cli
  • curl -u xxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx \
  • -X POST https://eu-gb.functions.cloud.ibm.com/api/v1/namespaces/j.thepac%40gmail.com_dev/actions/hello-world/helloworld?blocking=true

Test3

Open ibmcloudcli
$python3    #open pythonshell
url="https://eu-gb.functions.cloud.ibm.com/api/v1/namespaces/j.thepac%40gmail.com_dev/actions/hello-world/helloworld?blocking=true"
auth=("xxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxx","xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
data={"name":"deepak"}
r=requests.post(url,json=data,auth=auth,verify=False)
r.content

API GateWay (Proxy) :

U can create a proxy link for "https://eu-gb.functions.cloud.ibm.com/api/v1/namespaces/j.thepac%40gmail.com_dev/actions/hello-world/helloworld?blocking=true" link by Creating ApiGateWay and providing the above url .

Simple ETL from COS to DB2

Pre- Req:

DB2:

  • Make sure u have created a DB2 instance in IBMCLoud
  • Create a table in DB2 (do not insert any records)
  • CREATE TABLE table_name (col1 int, col1 varchar(255)); -- successfully created
  • In Db2 Ui > Data icon >  Tables 
  • Click on the scheme
  • check if the table is created
    • Test it
      • Syntax : Select * from scheme.table;
      • Example:Select * from DXC02390.table_name;
  • note down the Scheme name and table name
  • Click on about icon in DB2 UI 
  • Note down from "<crn ..........::>" 

Cloudant:

  • Create a Cloudant Object Storage (COS) in IBM Cloud 
  • Create a Bucket 
  • Add a parq File , with scheme similar to the above Table created (use apache spark to create the file locally and drag and drop)
  • select the uploaded parq file > Object Details > copy Object SQL URL

Steps:

  • Create SQL Query instance in ibmcloud 
  • Run the below command to copy the data from COS to DB2
Syntax :
SELECT * FROM <Object SQL URL>  STORED AS PARQUET INTO crn:xxxxxxx:/scheme.table PARALLELISM 2

Example:
SELECT * FROM cos://jp-tok/cloud-object-storage-7d-cos-standard-gsi/test2Cols.parquet STORED AS PARQUET
INTO 
crn:v1:bluemix:public:dashdb-for-transactions:eu-gb:a/e31b7085afca4ab8b6ac9b1077cd8af9:9257e5bc-49f0-43a1-b776-f7a0ff41b2b6::/DXC02390.MONOREPO_POC PARALLELISM 2

Copy ETL using REST 

Pre-Req:  Simple ETL from COS to DB2

curl -X POST 'https://iam.cloud.ibm.com/identity/token' \
    -H 'Content-Type: application/x-www-form-urlencoded' \
    -d 'grant_type=urn:ibm:params:oauth:grant-type:apikey&apikey={Create APi Key from Manage >Access>Api keys}'

Copy Response Token and save it to 
API_TOKEN = "xxxxxx"
or 
SET API_TOKEN="xxxxxx"

Get Current Jobs

curl -XGET   \
--url "https://api.sql-query.cloud.ibm.com/v3/sql_jobs?type=batch&instance_crn=crn:v1:bluemix:public:sql-query:in-che:a/e31b7085afca4ab8b6ac9b1077cd8af9:29b693b9-b195-4549-a2b0-03c93a26e3d1::"  \
 -H "Accept: application/json"  \
 -H "Authorization: Bearer <API_TOKEN>" 

#type=batch or type=stream

#Copy from 1 parq to another
curl -XPOST  \
--url "https://api.sql-query.cloud.ibm.com/v3/sql_jobs?instance_crn=crn:v1:bluemix:public:sql-query:in-che:a/e31b7085afca4ab8b6ac9b1077cd8af9:29b693b9-b195-4549-a2b0-03c93a26e3d1::"  \
-H "Accept: application/json"  \
-H "Authorization:Bearer <API_TOKEN>"  \
-H "Content-Type: application/json"   \
-d '{"statement":"SELECT * FROM cos://jp-tok/cloud-object-storage-7d-cos-standard-gsi/test2Cols.parquet STORED AS PARQUET INTO cos://jp-tok/cloud-object-storage-7d-cos-standard-gsi/test2Cols_result"  }'

Run Spark Job on COS Data

  • login to IBMCLOUD
  • Goto Catalog > Search for Watson Studio
  • Agree to terms and conditions> Click on Create 
  • Click On next >Next > click Create Watson Studio
  • Click on Projects > New Project >Empty Project
  • Add to Project > Notebook 
  • Select Runtime > python (least configuration)
!pip -q install ibmcloudsql
import ibmcloudsql

cloud_api_key="Create api key from Manage"
sql_crn="crn of SQL Query Instance"
sql_cos_endpoint="cosendpoint of bucket/result_prefix"
query="right click on the COS parq file and click on SQL Query"

sqlClient = ibmcloudsql.SQLQuery(cloud_api_key, sql_crn, sql_cos_endpoint) 
#sqlClient =ibmcloud.sqlSQLQuery(my_ibmcloud_apikey, my_instance_crn)

res=sqlClient.run_sql(query)
  • You can create a job and run the notebook at a specific time and results can be seen in the Jobs tab.

Note :

  1. Any file you drag and drop in Notebook will automatically get saved into COS . 
  2. Click on insert code to add spark code to work on the Dataframe.


Ref:
  1. https://cloud.ibm.com/docs/sql-query
  2. https://medium.com/codait/analyzing-data-with-ibm-cloud-sql-query-bc53566a59f5
  3. https://cloud.ibm.com/docs/sql-query?topic=sql-query-data-transport-automation-to-db2-on-cloud
  4. https://www.ibm.com/cloud/blog/announcements/automate-serverless-data-pipelines-for-your-data-warehouse-or-data-lakes
  5. https://dataplatform.cloud.ibm.com/exchange/public/entry/view/4a9bb1c816fb1e0f31fec5d580e4e14d
  6. https://cloud.ibm.com/docs/sql-query?topic=sql-query-sql-reference
  7. https://video.ibm.com/playlist/633112 #https://www.youtube.com/watch?v=s-FznfHJpoU
  8. https://cloud.ibm.com/apidocs/sql-query-v3#introduction #REST
  9. https://cloud.ibm.com/apidocs/db2-on-cloud/db2-on-cloud-v4
  10. https://video.ibm.com/playlist/633075 #jupyter notebook
  11. https://cloud.ibm.com/docs/AnalyticsEngine?topic=AnalyticsEngine-working-with-sql#running-spark-sql-with-scala
  12. https://github.com/IBM-Cloud/sql-query-clients
  13. https://github.com/IBM-Cloud/sql-query-clients/tree/master/Python

Monday, October 11, 2021

Bazel

 Creating Bazel Project 

Fast Step up Guide

1.  make sure bazel is installed in your computer 
2.  create a new folder as Project
3.  cd inside the project folder
4.  create a new "WORKSPACE" file
5.  create python/Program folder
6.  cd to Program
7.  Create a new file BUILD file:

    package(default_visibility = ["//visibility:public"])
    py_binary(
        name = 'hello', #anyname
        main = 'hello.py', #reference path eg:  parentfolder.file
        srcs= ['hello.py'], #filename 
    )

8.  $echo "print('hi')" > hello.py 
9.  make sure ur in folder containing BUILD file
10. $bazel run hello

Bazel has default setting for Python and Java ie., u can start with empty WORKSPACE and run python/java source files .

Refer for other languages:
https://docs.bazel.build/versions/4.2.1/rules.html

Other Languages (example scala):


You need to configure workspace - http_archive , skylib , language specific rules , Maven .

  1. start with http_archive -support to download package from https
    1. load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
  2. Download skylib - support for shell command 
    1. skylib_version = "0.8.0"
      http_archive(
      name = "bazel_skylib",
      type = "tar.gz",
      url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{}/bazel-skylib.{}.tar.gz".format (skylib_version, skylib_version),
      sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e",
      )
  3. Load 
    1. rules_scala  : like scala_binary,scala_test  etc., to use in BUILD file
    2. scala_config : Config scala version
    3.  scala_register_toolchain  : For using the jar file build from 1 languge as input to another
    4. scala repositories : to download default libraries for scala 
  4. Set maven as third party repo

IntelliJ Setup

1. Make sure intelliJ has bazel plugin installed
2. import above project as basel project
3. create new
4. next ( if u already have .ijwb/ folder created , make sure it is deleted)
5. done

Common Commands :

  • bazel build target #target can be name of build or //path of package:target
  • bazel run target
  • bazel test target
  • bazel coverage target
  • bazel query deps(target)
  • bazel fetch target
  • bazel version
  • bazel clean --expunge

Advantages:

  • Google product
  • Language independent
  • Platform Independent (MAc, Linux etc)
  • Hermetic (reproducible: the same inputs build exactly the same outputs every time)
  • Cross Langauge Dependencies  (Python libarary can call Java binary etc., )
  • Large Code base
  • Caches Dependencies
  • Parallel Builds
  • Enable Remote  (download Dependencies at remote)
  • Dependency Tree Feature
  • Query Dependencies

Cons : 

  • Network Dependency (1st time and new Dependency)
  • Enlist every dependency (i.e., if a dependency uses another dependency, that one must be declared too)
  • Manually declaring all dependencies can cause version conflicts when one library uses one version and another library uses a different version

Features

  • Bazel follows python syntax
  • Workspace: a folder with a WORKSPACE file, also called a Bazel repo.
  • package :Is a Folder inside Bazel Repo with BUILD file .This folder contains Src code files and other files
  • Target :Everything inside your packages can be considered target
  • Label:The nomenclature of a target is known as a label. It’s just a way to recognize different targets 
  • .bazelrc :Settings that are taken into account every time Bazel builds your project.
  • buildifier : Used to ensure that all your build files are formatted in a similar fashion

WORKSPACE file

Enlists all external repo the bazel repo is dependent on

 Example :

workspace(name="intro_to_bazel") #name of the workspace

#load("filename","method")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_name")
git_name(
    name= "com_github_xxx",
    commit="xxxxxxxxxxxxxxxx",
    remote="https://github.com/xxx" 
)

Rule Definition in WORSPACE

  • Example:load("//foo/bar:file.bzl", "some_library")
  • This code will load the file foo/bar/file.bzl and add the some_library symbol to the environment. 
  • This can be used to load new rules, functions or constants (e.g. a string, a list, etc.).
  • *_binary rules build executable programs in a given language. After a build, the executable will reside in the build tool's binary output tree 
  • *_library rules specify separately-compiled modules in the given programming language

  • *_test rules are a specialization of a *_binary rule, used for automated testing

Note :

  • https://github.com/bazelbuild/examples/tree/main/java-tutorial
  • In this project WORKSPACE is empty because  Native rules ship with the Bazel binary and do not require a load statement. Native rules are available globally in BUILD files.
  • But for scala ,python etc u need to include load statements in workspace and use them in Build files

Steps:

  • Open link https://github.com/bazelbuild
  • select repos u need for creating ur project
  • Example if u want to add "bazel-skylib" (Provides functions , file paths, and data types in build file)

####### WORSPACE ########

load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
bazel_skylib_workspace()


BUILD File
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//lib:shell.bzl", "shell")
p = paths.basename("foo.bar")
s = shell.quote(p)

  • Since scala does  not directly ship with bazel u need to include the "rules_scala" from bazelbuild in Workspace
  • And Use scala_binary , scala_library scala_test etc., to build and test 

BUILD

  • Folder with BUILD is called Package
  • Contains rules. scala_binary, java_binary etc.,

Example:

common/BUILD 
scala_library(
    name = "common",
    srcs = glob(["*.scala"]),
    visibility = ["//visibility:public"],
)


source/BUILD
scala_binary(
    name = "eid",
    srcs = glob(["filename.scala"]),
    main_class = "com.company.project.filename",
    deps = [
        "//path/common",
    ]
)

  • xxx_library takes sources , targets and label (ie., path for other Bazel Packages)
  • xxxx_library create a libarary 
  • //packagename:target
    • // - root
    • packagename - name of the Builds
    • target - particular target inside a package

  • srcs dependencies :Files consumed directly by the rule or rules that output source files.
  • deps dependencies: Rule pointing to separately-compiled modules providing header files, symbols, libraries, data, etc.
  • data dependencies:A build target might need some data files to run correctly.

Query Dependencies 

  • bazel query "deps(//foo)"
  • bazel query "allpaths(//foo, third_party/...)"
  • bazel query --noimplicit_deps 'deps(//package:target)' --output graph | dot-Tpng >graph.png
#if u are already inside the package
  • bazel query --noimplicit_deps 'deps(target)' --output graph | dot-Tpng >graph.png 
  • bazel query --noimplicit_deps 'deps(microservice)' --output graph | dot-Tpng >graph.png
  • bazel query --noimplicit_deps 'deps(microservice)' --output graph > simplified_graph.in

  • bazel query 'foo/...' --output package # What packages exist beneath foo?
  • bazel query 'kind(rule, foo:*)' --output label_kind #What rules are defined in the foo package?
  • bazel query 'kind("generated file", //foo:*)' #What files are generated by rules in the foo package?
  • bazel query 'attr(generator_function, foo, //path/to/search/...)' #What targets are generated by starlark macro foo?
  • bazel query 'buildfiles(deps(//foo))' | cut -f1 -d: #What's the set of BUILD files needed to build //foo?
  • bazel query 'tests(//foo:smoke_tests)' #What are the individual tests that a test_suite expands to?
  • bazel query 'kind(cc_.*, tests(//foo:smoke_tests))' #Which of those are C++ tests?
  • bazel query 'attr(size, small, tests(//foo:smoke_tests))' #Which of those are small? Medium? Large?
  • bazel query 'filter("pa?t", kind(".*_test rule", //foo/...))' #What are the tests beneath foo that match a pattern?
  • bazel query path/to/file/bar.java --output=package #What package contains file path/to/file/bar.java?
  • bazel query path/to/file/bar.java #What is the build label for path/to/file/bar.java?
  • bazel query 'buildfiles(deps(//foo:foo))' --output package #What packages does foo depend on?
  • bazel query 'deps(foo/... except foo/contrib/...)' --output package #What packages does the foo tree depend on, excluding foo/contrib
  • bazel query 'kind(genproto, deps(bar/...))' #What genproto rules does bar depend upon
  • bazel query 'kind("source file", deps(//path/to/target/foo/...))' | grep java$ #What file dependencies exist
  • bazel query 'deps(//foo) except deps(//foo:foolib)' #What targets does //foo depend on that //foo:foolib does not?
  • bazel query 'somepath(bar/...,groups2/...:*)' #Why does bar depend on groups2

Rules 

Read output of build file in another build files

Ref:


Sunday, October 10, 2021

Java Package Names and Naming Convention:

 Java Package Names and Naming Convention:

  • If you're just doing personal projects where nobody else will use the code, then you can make any name .
  • Don't make up something that starts with com. or net. or other top-level domain though, because that would imply that you own the domain name (ie. using com.john as your package name just because your name happens to be John is not a good idea).
  • The domain-name-backwards convention is there to prevent name collisions. Two different companies with the same product name will have different namespaces so everything works fine.

Ref:

  • https://stackoverflow.com/a/292175
  • https://docs.oracle.com/javase/tutorial/java/package/namingpkgs.html
  • https://stackoverflow.com/a/6247924

Thursday, September 30, 2021

Scala - Requests

Scala - Requests 


link:

https://index.scala-lang.org/com-lihaoyi/requests-scala/requests/0.6.9?target=_3.x


"com.lihaoyi" %% "requests" % "0.6.5" // sbt
compile "com.lihaoyi:requests_2.12:0.6.5" //gradle
val r = requests.get("https://api.github.com/users/lihaoyi")

r.statusCode
// 200

r.headers("content-type")

Saturday, September 11, 2021

Scala -Java : Convert String to Json

Scala -Java : Convert String to Json 

Code (Maven / https://mvnrepository.com/artifact/com.google.code.gson/gson):

import com.google.gson.{Gson, JsonParser}
val json="""{"hello": "world", "age": 42}"""
 val parser:JsonParser= new JsonParser();
val res= parser.parse(json).getAsJsonObject()
println(res.get("hello")) // prints "world" (a JsonElement; use .getAsString for the bare string)

//read from file
//val path="/path/file.json"
//val lines = scala.io.Source.fromFile(path, "utf-8").getLines.mkString

Code 2 (Requires Lift Web - Maven):

import net.liftweb.json._
import net.liftweb.json.Serialization.write // fixed path: Serialization lives in net.liftweb.json
case class Address(city:String,Country:String)
case class Person(name:String ,address:Address)
// write() needs an implicit Formats in scope ("implicits def" was invalid syntax)
implicit val formats: Formats = DefaultFormats
print(write(Person("Sam",Address("NY","US")))) // {"name":"Sam","address":{"city":"NY","Country":"US"}}

python : Setup VS Code

 python : Setup VS Code

Steps:

  • Make sure VS code is installed
  • Install python extension from Microsoft
  • run this in shell

import sys 

for pth in sys.path:print(pth)

  • Goto > Preferences>Settings >users 
  • Search for Python  > Python Path >add path "/usr/lib/python39/python.exe"
  • At the bottom you should see the interpreter name

Pylint

  • Ctrl+shift+P
  • Search for Python:Select Linter
  • Choose pylint and enable

Saturday, August 21, 2021

Bootstrap

 Bootstrap


Pre-Req:

  • downloadand install VSCode
  • Start with base html 
  • https://getbootstrap.com/docs/5.1/getting-started/introduction/#starter-template
  • Copy paste individual components from Bootstrap Document

Class

  • Used to add extra features(like color , size etc)
  • classes + spaces = add new property to the element
  • <button type="button" class="btn btn-secondary btn-sm">Primary</button>

ID : Used to identify the component

Image 

  • img src="https://source.unsplash.com/400x400/?code"
  • "https://source.unsplash.com/1200x300/?van"
  • If u have a file place in the same folder and  img src="1.jpg"
  • Dont put components inside other components Eg:Button inside paragraph
  • Image Centering - <img src="..." class="rounded mx-auto d-block" alt="..."
  • https://getbootstrap.com/docs/4.0/content/images/

Image Background

<head>
<style>
.page-holder {
min-height: 100vh;
}
.bg-cover {
background-size: cover !important;
}
</style>
</head>

<div style="background: url(https://i.postimg.cc/ZnHTP71s/aircraft-airplane-boat-1575833.jpg)" class="page-holder bg-cover">
<div class="container py-5">
<header class="text-center text-white py-5">
<h1 class="display-4 font-weight-bold mb-4">Contact us</h1>
<p class="lead mb-0">Address</p>
</header>
</div>
</div>

django:

#make sure css is added
#file is inside static
settings.py :
STATIC_URL = '/static/'
STATICFILES_DIRS = ( os.path.join('static'), )

<div style="background: url('{% static 'file_inside_static.jpeg'%}')" class="page-holder bg-cover">


Color 

  •     <button type="button" class="bg-danger">name</button>
  •     bg-danger = Red
  •     bg-warning = Yellow
  •     bg-success = Green
  •     bg-primary = blue
  •     bg-light 

    text Color

  •     <p class="text-white bg-dark">.text-white</p>
  •     <p class="text-light">.text-black-50</p>
  •     <p class="text-white-50 bg-dark">.text-white-50</p>
  •     <p class="text-success">.text-success</p>

container

  •  <div class="container"></div>
  •  Bootstrap > Container > Grid
  •     Bootstrap gives max of 12 columns in a row
  •     If "col-3" = use 3 of the 12 columns for this element. So the user is left with 9 columns

    Example :

    <div class="container">

        <div class="row bg-light">

            <div class="col-10 bg-danger"> 10 of 12</div>

             <div class="col-2 bg-danger"> remaining 2 of 12</div>

        </div>

    </div>

  •     It can be configured for different mobile devices as well 
  •     if "col-md-10 col-sm-6" means in medium device use size 10 and small device use size 6

Margins , Spacing and Padding:

  •     Create a new container
  •     add "my-1" inside the class
  •     U can give margins 1-4
  •     ml-1 = left margin (Bootstrap 4; use ms-1 in Bootstrap 5)
  • <div class ="row gx-5"> = horizontal gutters
  • <div class ="row gy-5"> = vertical gutter
  • <div class ="row g-5"> =horz+vert gutter
  • <div class="container">
      <div class="row row-cols-2 row-cols-lg-5 g-2 g-lg-3">
  • <div class ="row px-5"> =padding
  • https://getbootstrap.com/docs/5.1/utilities/vertical-align/
  • https://getbootstrap.com/docs/5.1/utilities/spacing/
  • https://getbootstrap.com/docs/5.1/utilities/position/

Using Examples (https://getbootstrap.com/docs/5.1/examples/):

  •     goto Examples in Bootstrap
  •     open any Example
  •     Inspect the element in Browser
  •     Add a new container in the html 
  •     Paste the element inside the container

 Centering

    add following to class "d-flex justify-content-center align-items-center"

Sequencing:

add this to class "order-md-1" /"order-md-2"

Linking html files :

href="about.html" <!--about.html is a file in the same folder-->

Login Box (Modal):

  • Pops up a message box on click on of the button
  • https://getbootstrap.com/docs/5.1/components/modal/#static-backdrop

ThumbNail

https://stackoverflow.com/a/8858052

Sunday, June 13, 2021

Important Resources

Important Resources

Open Applications

  • OSX - open is a universal opener in OSX
  • Linux - has xdg-open (open ~/.bashrc > add "alias open=xdg-open" — no spaces around "=" > save file > source ~/.bashrc)
  • Windows - use start

Online Virtual terminals

  • Google API Dashboard > Cloud Shell
  • Docker playground

ReadMe Editor

Text to Table

https://www.tablesgenerator.com/text_tables

Host website Free Server

Host website non Server

  • https://www.netlify.com/
  • github (make sure home page is index.html and linked pages have path "/repo_name/about.html") and (repo>settings >pages>select branch and theme)

Image Hosting and server:

  • Gdrive
  • https://imgbb.com/
  • Github >Open any Repo > Create Issue > Drag and Drop Image > Copy the Link created

Banner / Header

Icons

Images:

Image from URL:

Method 1

Method 2

  • Search image in Google images
  • open source website where u have the image
  • Browser > Inspect
  • Copy the url

Free BootStrap Code sniffet

Free Templates ( Resume , Poster , Letterhead etc., )

Free GIFs:

Free Api

Free Animation:

Free CSS:

https://animista.net/

Mobile View of Websites:

https://ready.mobi/

Kafka Online

https://customer.cloudkarafka.com/login

DB Client

https://hub.docker.com/_/adminer

Thanks to :

https://www.youtube.com/watch?v=qEw8qlRX05A&t=28s

Sunday, May 16, 2021

Kubernetes : Basics


Kubernetes : Basics


Kubernetes

Problem with VM:

  • Virtual machines comes with unwanted apps and services Eg: Browser
  • All processes/threads were not in the user's control.
  • Memory allocation was not in the user's control.
  • Syncing Virtual machines is not easy
  • Lot of configuration was needed
  • To delete and reconfigure was a nightmare.

Kubernetes

  • configure, automate, and manage applications using containers Image
  • Open Source
  • Facility for scheduling of containers
  • Creation, deletion, and movement of containers
  • Easy scaling of containers
  • Monitoring

kubernetes local :

Terminology :

- Cluster: A collection of nodes(Computer) represent a cluster.
- Pod:
	- Runs Container Image
	- unique ip address
	- Pods are the smallest unit in kubernetes
	- private, isolated network.
- Container Image : code 
- kubectl: Command creates a proxy ,forwarding communications into the cluster using API. 

SetUP (Project )

minikube start
minikube stop
kubectl create namespaces somename                                                                              
kubectl config set-context --current --namespace=somename

Basic 1 Pod :

kubectl run -it alpine --image=alpine   # Create a Pod
kubectl get pod
kubectl exec -it <pod-id> bash #exit
kubectl logs <podname>

Basic with Deployment with 2 Pods

# Create a Deployment with 2 pods
kubectl create deployment mydep --image=nginx --replicas=2  
		#configmap(env) , secret , cronJob
kubectl get deployment
kubectl describe deployment mydep

# Create a service to expose deployment
kubectl expose deployment mydep --port=80 --type=LoadBalancer
kubectl get services
kubectl describe service mydep

#Port forward to outside world
kubectl port-forward svc/mydep 8080:80&
#open browser > localhost:8080

kubectl get deployment mydep -o yaml >> mydep.yml
kubectl get service mydep -o yaml >> mydepsvc.yml

kubectl delete deployment mydep
kubectl delete service mydep

kubectl apply -f mydep.yml
kubectl apply -f mydepsvc.yml
kubectl port-forward svc/mydepsvc 8080:80&

Env Variables

kubectl create configmap env --from-literal=key=1
kubectl get configmap env -o yaml
kubectl get configmap env -o=jsonpath='{.data.key}'

Secret

kubectl create secret generic my-secret --from-literal=pass=123
kubectl get secret my-secret -o=jsonpath='{.data.pass}' | base64 --decode

CronJob /Job

kubectl create cronjob hello --image=busybox:1.28  --schedule="*/1 * * * *" -- echo "Hello World"
kubectl create job hello --image=busybox:1.28 -- echo "Hello World"

Export Yaml kubectl create deployment html --image=nginx --dry-run=client -o yaml > deployment.yaml # Volume Mount

Stateful Vs Stateless

Stateful

  • All Databases ie., Relational and Non-Relational
  • All replicas are synced (maintain Data Consistency)
  • Mandatory Persistent Volumes ( Fault Tolerance)
  • Have Serial ID alloted from 0 to n
  • Complex to create
  • Not Recommended to use Databases in Kubernetes*
  • Uses Master and Slave :
    • Master is id-0
    • only one allowed to change data
    • Data gets transfered from id-0 > id-1> id-2 ....

Stateless:

  • Eg: Rest Api Server
  • Not Synced
  • Not mandatory Persistent Volumes

YML

	apiVersion: v1
	kind: Pod
	metadata:
	  name: myapp
	spec:
	  containers:
	    - name: myapp
	      image: dockerimage
	      command: [ "env" ] # print env var
  • SERVICE Git:
    • Service /portForward (Testing only)
    • ClusterIp (Within Cluster)
    • Loadbalancer (Commonly Used to expose Application)
    • Ingress

Simple Service/port-forward (Used for Testing Only - )

apiVersion: v1
kind: Service
metadata:
  name: newservice
spec:
  selector:
    app: targetappname   # must match the labels on the target pods/deployment
  ports:
  - port: 8085           # port the Service listens on
    targetPort: 8085     # port the container listens on
    # Port names must be lowercase RFC-1123 labels; "httpTest" (uppercase) is rejected.
    name: http-test

#kubectl get service
#kubectl port-forward svc/newservice 8085:80
#kubectl delete service newservice

ClusterIP - Within Cluster Only http://localhost:8080/api/v1/proxy/namespaces/<NAMESPACE>/services/<SERVICE-NAME>:<PORT-NAME>/

apiVersion: v1
kind: Service
metadata:  
  name: clusterservice
spec:
  selector:    
    app: targetappname
  type: ClusterIP
  ports:  
  - name: http
    port: 80
    targetPort: 80
    protocol: TCP

#kubectl proxy --port=8080

LoadBalancer - load balancer functionality - Only Used in Cloud Services ( AWS, GCP ,Azure ..) - Each service needs new LoadBalancer and new IP address.

apiVersion: v1
kind: Service
metadata:
  name: myloadbalancer
spec:
  type: LoadBalancer      # cloud provider provisions an external IP for this Service
  selector:
    app: targetappname    # must match the labels on the target pods/deployment
  ports:
    - port: 8000          # externally exposed port
      # targetPort may be a number or the *name* of a containerPort; named ports
      # must be lowercase with '-' only — "app_port" (underscore) is invalid.
      targetPort: app-port

Ingress (Similar to port-forward / Simple Service ) - Ingress is an object that allows access to your Kubernetes services from outside - Needs ingress controller running. - Ingress controller ( will not run by default ) - Ingress Resource

			kubectl get pods --all-namespaces | grep ingress
			kubectl get service --all-namespaces | grep ingress
			kubectl get Ingress ingress-name
			kubectl delete ingress ingress-name

ConfigMap (Environment variables)

apiVersion: v1
kind: ConfigMap
metadata:
  name: myconfig
data:
  TEST_ENV: test
---
apiVersion: v1
kind: Pod
metadata:
  name: myapp
spec:
  containers:
    - name: busybox
      image: k8s.gcr.io/busybox
      command: [ "env" ] # print env var
      envFrom:
        - configMapRef:
            name: myconfig

#kubectl apply -f configmap.yaml
#kubectl logs myapp
#kubectl delete -f configmap.yml

Secret

apiVersion: v1
kind: Secret
metadata:
  name: my-secret
type: Opaque
#$echo "test" | base64
data:
  testpassword: dGVzdAo=
---
apiVersion: v1
kind: Pod
metadata:
  name: env-pod
spec:
  containers:
    - name: test
      image: alpine
      command: ['env']
      env:
        - name: USER
          valueFrom:
            secretKeyRef:
              name: my-secret
              key: testpassword
		              
#kubectl describe secrets
#kubectl get secret my-secret -o jsonpath='{.data}'
#echo '[encoded-value]' | base64 --decode
#kubectl delete secret my-secret           

Volume( A Directory accessible to all containers running in a pod.)

apiVersion: v1
kind: ConfigMap
metadata:
  name: test-file
data:
  data.csv: |
    name,age
    sam,1,
    tom,2
---
apiVersion: v1
kind: Pod
metadata:
  name: test-pod
spec:
  restartPolicy: Never
  containers:
    - name: busybox
      image: k8s.gcr.io/busybox
      command: [ "sleep", "3600" ]
      volumeMounts:
        - name: config
          mountPath: "/datadir"
          readOnly: true
  volumes:
    - name: config
      configMap:
        name: test-file

#kubectl apply -f configmap-file.yaml
#kubectl get pod
#kubectl exec -it test-pod sh
# $cat datadir/data.csv
#kubectl delete -f configmap-file.yaml

Create vs Apply :

Create new Only (Cannot give pod in command) :

-	kubectl create -f manifest.yml	#create new Pod  

Create new / Overwrite existing x 3 :

-   kubectl apply -f manifest.yaml   
-   kubectl apply -f https://git.io/vPieo
-   cat <<EOF | kubectl apply -f -
        apiVersion: v1
        kind: Pod
        metadata:
        name: busybox-sleep
        spec:
        containers:
        - name: busybox
            image: busybox:1.28
            args:
            - sleep
            - "1000000"
        EOF

Examples

Pre-Req:

  • $minikube
  • make sure Docker application is running
  • run "eval $(minikube docker-env)" #make use of docker in minikube Docker Tutorial

Example Online:

labs.play-with-docker.com

  • Remove Taint :

          kubectl get nodes //copy node name
          kubectl describe node node1| grep -i taint      //copy name 
          kubectl taint node nodename pasteabove-
    
  • Create namespace:

          kubectl create namespaces somename                                                                              
          kubectl config set-context --current --namespace=somename
    

Example : Using hi.yml file (create)

apiVersion: v1
kind: Pod
# Pod / Job
metadata:
 name: hi
spec:
 containers:
   - name: hi
     image: ubuntu
     command: ['sh', '-c', 'echo "Hello, Kubernetes!" && sleep 3600']
     # imagePullPolicy: Never

kubectl create -f hi.yml
kubectl exec -it hi -- /bin/bash

Activities

Activity : Pull local Docker image into minikube

Create a file with name Dockerfile
Add below lines :
		FROM alpine:3.4
		RUN apk update
		RUN apk add vim 
		RUN apk add curl
open new terminal
minikube start
eval $(minikube docker-env)
docker build -t foo:1.0 . 
docker images #Check if foo is created
kubectl run foo -it --image=foo:1.0 
		-   $cat /proc/version
		-   $exit
kubectl get pods
kubectl get deployments 
kubectl delete deployment foo

Activity :Create "Hello world" python program push and pull from remote Docker Pre-Req:

  • Linux machine
  • Goto Docker-hub and create a account
  • Login to Docker-hub and create a repository
  • Restart your system and Open Bios
  • Enable vt-x / virtualization
  • Turn on Docker application
  • Make sure
  • Create a folder called apps
  • Create 3 files inside it .

Create deployment.yaml

	apiVersion: v1
	kind: Pod
	metadata:
		name: foo
	spec:
		containers:
		- name: whatever
			image: index.docker.io/usn/repo_name:latest
			imagePullPolicy: Always
			imagePullSecrets:
			- name: my_registry

main.py

print('hi')  

Dockerfile" (no extension)

# Base image with Python 3.7 preinstalled
FROM python:3.7
# Create the application directory
RUN mkdir /app
# All subsequent commands run relative to /app
WORKDIR /app
# Copy the build context (main.py etc.) into the image
ADD . /app/
# Documented listen port (main.py only prints, so nothing actually binds it)
EXPOSE 5000
# -u = unbuffered stdout, so "hi" appears immediately in `kubectl logs`
CMD ["python","-u", "/app/main.py"]

Steps:

-   cd into apps
-   sudo docker images #empty table
-   sudo docker build -t any_name:v1 . # note there is  '.' at the end
-   sudo docker images 
-   sudo docker run -p 4000:80 any_name 
-   sudo docker images #note down the id /name, usually it is latest
-   sudo docker login
-   sudo docker tag TAG_id usn/repo_name:TAG_NAME_of_image
#docker tag 3a4677d31cde usn/test_repo:latest
-   sudo docker push usn/repo_name:TAG_NAME_of_image
#docker push usn/repo:latest
-   kubectl apply -f deployment.yaml #pull image from docker hub & create pod
-   kubectl logs -f foo #hi
-   kubectl get pods #shows all pods
-   kubectl delete pod pod_name #to delete pod

#Status = CrashLoopBackOff. Because we have just 1 print statement, the application exits right after printing "hi"; the pod keeps trying to restart the container and has done so multiple times

Activity : Send arguments from different terminal

kubectl attach redis_container -i

Activity :Forward ports from Pods to your local machine

kubectl port-forward redis-izl09 6379
telnet localhost 6379 	#nc -l -p 6379  

Ref: https://kubernetesbyexample.com/ https://training.play-with-docker.com/ops-s1-hello/

Saturday, May 15, 2021

Django - Simple Registration Form

Django - Simple Registration Form 

 


<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<p>Register</p>

<form method="POST" action="">
{% csrf_token %}
{{form.username.label}}
{{form.username}}
{{form.email.label}}
{{form.email}}
{{form.password1.label}}
{{form.password1}}
{{form.password2.label}}
{{form.password2}}
<input type="submit" name="new_user">


</form>


</head>
<body>

</body>
</html>

Monday, March 22, 2021

CORRELATED SUBQUERIES


SubQuery : 
Simple subquery doesn't use values from the outer query and is being calculated only once:

-- Simple (non-correlated) subquery: the inner query runs once,
-- then the outer query filters student ids against its result set.
SELECT id, first_name FROM student_details
WHERE id IN (SELECT student_id FROM student_subjects
WHERE subject= 'Science');
 
CoRelated Subquery  -
Query To Find all employees whose salary is above average for their department

-- Correlated subquery: the inner AVG(salary) is re-evaluated for each
-- outer row, scoped to that row's department via emp.department.
SELECT employee_number, name FROM employees emp
WHERE salary > ( SELECT AVG(salary) FROM employees
WHERE department = emp.department);

Saturday, March 13, 2021

FastApi : Create Production ready Api

 FastApi : Create Production ready Api (faster than Flask)


https://fastapi.tiangolo.com/

Features

1. Asynchronous

2. High Performance

3. Less Code

4. Data Type  and Data Models auto Conversions

5. Auto Documentation

    - swagger (/docs)

    - ReDoc   (/redoc)


Pre-Req:

Install and activate virtual environment to be safe.

Steps:

  1. pip install fastapi
  2. pip install hypercorn #server
  3. touch main.py
  4. copy paste below code
  5. hypercorn main:app --reload


from fastapi import FastAPI
from pydantic import BaseModel  # request/response model
# import requests

# Minimal in-memory CRUD API. NOTE: the original paste had lost all
# indentation inside the class and route functions, making it invalid Python.
app = FastAPI()
db = []  # in-memory "database": a list of Person models


class Person(BaseModel):
    name: str


@app.get('/')
def index():
    return {'person': 'name'}


@app.get('/persons')
def get_persons():
    # Return every stored person.
    results = []
    for person in db:
        results.append(person)
    return results


@app.get('/persons/{person_id}')
def get_person(person_id: int):
    # person_id is 1-based from the caller's perspective.
    return db[person_id - 1]


@app.post('/persons')
def create_person(person: Person):
    db.append(person)
    return db[-1]


@app.delete('/persons/{person_id}')
def delete_person(person_id: int):
    db.pop(person_id - 1)
    return {}

Friday, February 5, 2021

Raspberry Pi Pico

Raspberry Pi Pico

Micro Python:

1. Open Source
2. Modules are cheaper 	

Circuit Python :

1. Fork of Micro Python 
2. Owned by Ada Fruit
3. Compatible with Only Ada Fruit Modules
4. Expensive 
5. No ConCurrency and No State Sharing 

Pre- Req:

  1. Download the below files Micro Python / Circuit Python
    1. Micro Python (Recommended) - MicroPython
    2. Circuit Python UF2 file from - https://circuitpython.org/board/raspberry_pi_pico/
  2. Hold Raspberry Pi Pico on-board BOOTSEL button (Button on the board)
  3. Plug it into USB (or pulling down the RUN/Reset pin to ground)
  4. It will appear as a USB disk drive
  5. you can copy paste the firmware onto drive
  6. the drive will change for Circuit python and no reaction for Micro Python

Micro Python :

  1. Install and open thonny IDE
  2. IDE should automatically detect pico

Shell

from machine import Pin
led = Pin("LED", Pin.OUT)
led.value(1)
led.value(0)

Code - save as main.py in "MicroPython" device

# Blink the Raspberry Pi Pico on-board LED four times (MicroPython).
from machine import Pin
import time
led = Pin("LED", Pin.OUT)  # Pin(25, Pin.OUT)
for i in range(1, 5):  # four on/off cycles
    print(i)  # progress indicator on the serial console
    led.value(1)  # LED on
    time.sleep(1)  # Use 1 second instead of 10 seconds for better visibility
    led.value(0)  # LED off
    time.sleep(1)

Circuit Python :

Configure Mu Editor so that Code can be seen running in real time., ie as soon as the code is saved , the result reflected in LEDs directly .

  1. sudo apt-get update
  2. sudo apt-get -f upgrade
  3. apt install libfuse2
  4. Download and Install Mu Editor

Run

  1. Copy paste below program into code.py
  2. Save the file in the device
  3. Open Mu Editor
  4. Should automatically recognise PICO and Opens code.py

Blink Program

import board
import time
from digitalio import DigitalInOut, Direction,Pull

# On-board LED, plus an external LED wired between Pin 1 (GP0) and Pin 2.
led = DigitalInOut(board.LED)
led.direction = Direction.OUTPUT
op = DigitalInOut(board.GP0)
op.direction = Direction.OUTPUT

# Toggle each LED in turn, half a second apart, forever.
while True:
    led.value = not led.value
    time.sleep(0.5)

    op.value = not op.value
    time.sleep(0.5)

Input Switch

    # Poll a push-button on GP0 and print its state twice a second.
    import time
    import board
    import digitalio
    button = digitalio.DigitalInOut(board.GP0)
    # Internal pull-up: reads True when the button is open,
    # False when it is pressed (shorted to ground).
    button.switch_to_input(pull=digitalio.Pull.UP )
    while True:
            print(button.value)
            time.sleep(0.5)

https://learn.adafruit.com/getting-started-with-raspberry-pi-pico-circuitpython/blinky-and-a-button https://www.youtube.com/watch?v=nYA4PVljE4Q

view raw micropython.md hosted with ❤ by GitHub

Sunday, January 31, 2021

CIRCUITPYTHON :SEEEDUINO XIAO [SEED]

 CIRCUITPYTHON :SEEEDUINO XIAO

Steps:

 Uses SAMD21 Processor

1. connect SEEEDuino xiao to PC using TYPE-C cable
2. short RST pins using a cable fast , 2 times.
3. Once done successfully, an Arduino drive appears
4. Go website -

https://circuitpython.org/board/seeeduino_xiao/

https://wiki.seeedstudio.com/Seeeduino-XIAO-CircuitPython/

5. Download latest .UF2 file
6. Copy and paste it inside the drive
7. Now the drive will be converted to CIRCUITPY
8. Create a file code.py
9. Copy paste below code into code.py (same for all circuit py IC)

# Blink the SEEEDuino XIAO built-in LED once per second (CircuitPython).
import time
import board
from digitalio import DigitalInOut,Direction


led = DigitalInOut(board.D13) #D13 is a built in LED

#A1 - A10 can be used as well if u use a separate LED and a Resistor 100 - 400 ohms refer below for calculations
# Fixed typo: original read "led.direction=tinker .OUTPUT"; the imported
# class is Direction, so the attribute is Direction.OUTPUT.
led.direction = Direction.OUTPUT

while True:
    led.value = True
    time.sleep(1)
    led.value = False
    time.sleep(1)

   


 

 

10. Save file
11. The LED should start blinking


A simple LED circuit consists of a LED and resistor. The resistor is used to limit the current that is being drawn and is called a current limiting resistor. Without the resistor the LED would run at too high of a voltage, resulting in too much current being drawn which in turn would instantly burn the LED, and likely also the GPIO port.

To calculate the resistor value we need to examine the specifications of the LED. Specifically we need to find the forward voltage (VF) and the forward current (IF). 

A regular red LED has a 

forward voltage (VF) of 1.7V 

 forward current of 20mA (IF). 

output voltage of the IC which is 3.3V.

We can then calculate the resistor needed-


R = (Vout − VF) / IF = (3.3 V − 1.7 V) / 20 mA = 80 Ω




Voltage across the resistor = 3.3 V − 1.7 V = 1.6 V, so R = 1.6 V / 0.020 A = 80 Ω